repo_id stringclasses 875
values | size int64 974 38.9k | file_path stringlengths 10 308 | content stringlengths 974 38.9k |
|---|---|---|---|
googleapis/google-cloud-java | 37,715 | java-datalineage/proto-google-cloud-datalineage-v1/src/main/java/com/google/cloud/datacatalog/lineage/v1/ListProcessesResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/datacatalog/lineage/v1/lineage.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.datacatalog.lineage.v1;
/**
*
*
* <pre>
* Response message for
* [ListProcesses][google.cloud.datacatalog.lineage.v1.ListProcesses].
* </pre>
*
* Protobuf type {@code google.cloud.datacatalog.lineage.v1.ListProcessesResponse}
*/
public final class ListProcessesResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.datacatalog.lineage.v1.ListProcessesResponse)
ListProcessesResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListProcessesResponse.newBuilder() to construct.
private ListProcessesResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListProcessesResponse() {
processes_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListProcessesResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.datacatalog.lineage.v1.LineageProto
.internal_static_google_cloud_datacatalog_lineage_v1_ListProcessesResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.datacatalog.lineage.v1.LineageProto
.internal_static_google_cloud_datacatalog_lineage_v1_ListProcessesResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.datacatalog.lineage.v1.ListProcessesResponse.class,
com.google.cloud.datacatalog.lineage.v1.ListProcessesResponse.Builder.class);
}
public static final int PROCESSES_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.datacatalog.lineage.v1.Process> processes_;
/**
*
*
* <pre>
* The processes from the specified project and location.
* </pre>
*
* <code>repeated .google.cloud.datacatalog.lineage.v1.Process processes = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.datacatalog.lineage.v1.Process> getProcessesList() {
return processes_;
}
/**
*
*
* <pre>
* The processes from the specified project and location.
* </pre>
*
* <code>repeated .google.cloud.datacatalog.lineage.v1.Process processes = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.datacatalog.lineage.v1.ProcessOrBuilder>
getProcessesOrBuilderList() {
return processes_;
}
/**
*
*
* <pre>
* The processes from the specified project and location.
* </pre>
*
* <code>repeated .google.cloud.datacatalog.lineage.v1.Process processes = 1;</code>
*/
@java.lang.Override
public int getProcessesCount() {
return processes_.size();
}
/**
*
*
* <pre>
* The processes from the specified project and location.
* </pre>
*
* <code>repeated .google.cloud.datacatalog.lineage.v1.Process processes = 1;</code>
*/
@java.lang.Override
public com.google.cloud.datacatalog.lineage.v1.Process getProcesses(int index) {
return processes_.get(index);
}
/**
*
*
* <pre>
* The processes from the specified project and location.
* </pre>
*
* <code>repeated .google.cloud.datacatalog.lineage.v1.Process processes = 1;</code>
*/
@java.lang.Override
public com.google.cloud.datacatalog.lineage.v1.ProcessOrBuilder getProcessesOrBuilder(int index) {
return processes_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* The token to specify as `page_token` in the next call to get the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* The token to specify as `page_token` in the next call to get the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < processes_.size(); i++) {
output.writeMessage(1, processes_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < processes_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, processes_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.datacatalog.lineage.v1.ListProcessesResponse)) {
return super.equals(obj);
}
com.google.cloud.datacatalog.lineage.v1.ListProcessesResponse other =
(com.google.cloud.datacatalog.lineage.v1.ListProcessesResponse) obj;
if (!getProcessesList().equals(other.getProcessesList())) return false;
if (!getNextPageToken().equals(other.getNextPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getProcessesCount() > 0) {
hash = (37 * hash) + PROCESSES_FIELD_NUMBER;
hash = (53 * hash) + getProcessesList().hashCode();
}
hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getNextPageToken().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.datacatalog.lineage.v1.ListProcessesResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.datacatalog.lineage.v1.ListProcessesResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.datacatalog.lineage.v1.ListProcessesResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.datacatalog.lineage.v1.ListProcessesResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.datacatalog.lineage.v1.ListProcessesResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.datacatalog.lineage.v1.ListProcessesResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.datacatalog.lineage.v1.ListProcessesResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.datacatalog.lineage.v1.ListProcessesResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.datacatalog.lineage.v1.ListProcessesResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.datacatalog.lineage.v1.ListProcessesResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.datacatalog.lineage.v1.ListProcessesResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.datacatalog.lineage.v1.ListProcessesResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.datacatalog.lineage.v1.ListProcessesResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Response message for
* [ListProcesses][google.cloud.datacatalog.lineage.v1.ListProcesses].
* </pre>
*
* Protobuf type {@code google.cloud.datacatalog.lineage.v1.ListProcessesResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.datacatalog.lineage.v1.ListProcessesResponse)
com.google.cloud.datacatalog.lineage.v1.ListProcessesResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.datacatalog.lineage.v1.LineageProto
.internal_static_google_cloud_datacatalog_lineage_v1_ListProcessesResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.datacatalog.lineage.v1.LineageProto
.internal_static_google_cloud_datacatalog_lineage_v1_ListProcessesResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.datacatalog.lineage.v1.ListProcessesResponse.class,
com.google.cloud.datacatalog.lineage.v1.ListProcessesResponse.Builder.class);
}
// Construct using com.google.cloud.datacatalog.lineage.v1.ListProcessesResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (processesBuilder_ == null) {
processes_ = java.util.Collections.emptyList();
} else {
processes_ = null;
processesBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.datacatalog.lineage.v1.LineageProto
.internal_static_google_cloud_datacatalog_lineage_v1_ListProcessesResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.datacatalog.lineage.v1.ListProcessesResponse
getDefaultInstanceForType() {
return com.google.cloud.datacatalog.lineage.v1.ListProcessesResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.datacatalog.lineage.v1.ListProcessesResponse build() {
com.google.cloud.datacatalog.lineage.v1.ListProcessesResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.datacatalog.lineage.v1.ListProcessesResponse buildPartial() {
com.google.cloud.datacatalog.lineage.v1.ListProcessesResponse result =
new com.google.cloud.datacatalog.lineage.v1.ListProcessesResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.cloud.datacatalog.lineage.v1.ListProcessesResponse result) {
if (processesBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
processes_ = java.util.Collections.unmodifiableList(processes_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.processes_ = processes_;
} else {
result.processes_ = processesBuilder_.build();
}
}
private void buildPartial0(
com.google.cloud.datacatalog.lineage.v1.ListProcessesResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.datacatalog.lineage.v1.ListProcessesResponse) {
return mergeFrom((com.google.cloud.datacatalog.lineage.v1.ListProcessesResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.datacatalog.lineage.v1.ListProcessesResponse other) {
if (other
== com.google.cloud.datacatalog.lineage.v1.ListProcessesResponse.getDefaultInstance())
return this;
if (processesBuilder_ == null) {
if (!other.processes_.isEmpty()) {
if (processes_.isEmpty()) {
processes_ = other.processes_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureProcessesIsMutable();
processes_.addAll(other.processes_);
}
onChanged();
}
} else {
if (!other.processes_.isEmpty()) {
if (processesBuilder_.isEmpty()) {
processesBuilder_.dispose();
processesBuilder_ = null;
processes_ = other.processes_;
bitField0_ = (bitField0_ & ~0x00000001);
processesBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getProcessesFieldBuilder()
: null;
} else {
processesBuilder_.addAllMessages(other.processes_);
}
}
}
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.cloud.datacatalog.lineage.v1.Process m =
input.readMessage(
com.google.cloud.datacatalog.lineage.v1.Process.parser(),
extensionRegistry);
if (processesBuilder_ == null) {
ensureProcessesIsMutable();
processes_.add(m);
} else {
processesBuilder_.addMessage(m);
}
break;
} // case 10
case 18:
{
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.cloud.datacatalog.lineage.v1.Process> processes_ =
java.util.Collections.emptyList();
private void ensureProcessesIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
processes_ =
new java.util.ArrayList<com.google.cloud.datacatalog.lineage.v1.Process>(processes_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.datacatalog.lineage.v1.Process,
com.google.cloud.datacatalog.lineage.v1.Process.Builder,
com.google.cloud.datacatalog.lineage.v1.ProcessOrBuilder>
processesBuilder_;
/**
*
*
* <pre>
* The processes from the specified project and location.
* </pre>
*
* <code>repeated .google.cloud.datacatalog.lineage.v1.Process processes = 1;</code>
*/
public java.util.List<com.google.cloud.datacatalog.lineage.v1.Process> getProcessesList() {
if (processesBuilder_ == null) {
return java.util.Collections.unmodifiableList(processes_);
} else {
return processesBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* The processes from the specified project and location.
* </pre>
*
* <code>repeated .google.cloud.datacatalog.lineage.v1.Process processes = 1;</code>
*/
public int getProcessesCount() {
if (processesBuilder_ == null) {
return processes_.size();
} else {
return processesBuilder_.getCount();
}
}
/**
*
*
* <pre>
* The processes from the specified project and location.
* </pre>
*
* <code>repeated .google.cloud.datacatalog.lineage.v1.Process processes = 1;</code>
*/
public com.google.cloud.datacatalog.lineage.v1.Process getProcesses(int index) {
if (processesBuilder_ == null) {
return processes_.get(index);
} else {
return processesBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* The processes from the specified project and location.
* </pre>
*
* <code>repeated .google.cloud.datacatalog.lineage.v1.Process processes = 1;</code>
*/
public Builder setProcesses(int index, com.google.cloud.datacatalog.lineage.v1.Process value) {
if (processesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureProcessesIsMutable();
processes_.set(index, value);
onChanged();
} else {
processesBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The processes from the specified project and location.
* </pre>
*
* <code>repeated .google.cloud.datacatalog.lineage.v1.Process processes = 1;</code>
*/
public Builder setProcesses(
int index, com.google.cloud.datacatalog.lineage.v1.Process.Builder builderForValue) {
if (processesBuilder_ == null) {
ensureProcessesIsMutable();
processes_.set(index, builderForValue.build());
onChanged();
} else {
processesBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The processes from the specified project and location.
* </pre>
*
* <code>repeated .google.cloud.datacatalog.lineage.v1.Process processes = 1;</code>
*/
public Builder addProcesses(com.google.cloud.datacatalog.lineage.v1.Process value) {
if (processesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureProcessesIsMutable();
processes_.add(value);
onChanged();
} else {
processesBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* The processes from the specified project and location.
* </pre>
*
* <code>repeated .google.cloud.datacatalog.lineage.v1.Process processes = 1;</code>
*/
public Builder addProcesses(int index, com.google.cloud.datacatalog.lineage.v1.Process value) {
if (processesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureProcessesIsMutable();
processes_.add(index, value);
onChanged();
} else {
processesBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The processes from the specified project and location.
* </pre>
*
* <code>repeated .google.cloud.datacatalog.lineage.v1.Process processes = 1;</code>
*/
public Builder addProcesses(
com.google.cloud.datacatalog.lineage.v1.Process.Builder builderForValue) {
if (processesBuilder_ == null) {
ensureProcessesIsMutable();
processes_.add(builderForValue.build());
onChanged();
} else {
processesBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The processes from the specified project and location.
* </pre>
*
* <code>repeated .google.cloud.datacatalog.lineage.v1.Process processes = 1;</code>
*/
public Builder addProcesses(
int index, com.google.cloud.datacatalog.lineage.v1.Process.Builder builderForValue) {
if (processesBuilder_ == null) {
ensureProcessesIsMutable();
processes_.add(index, builderForValue.build());
onChanged();
} else {
processesBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The processes from the specified project and location.
* </pre>
*
* <code>repeated .google.cloud.datacatalog.lineage.v1.Process processes = 1;</code>
*/
public Builder addAllProcesses(
java.lang.Iterable<? extends com.google.cloud.datacatalog.lineage.v1.Process> values) {
if (processesBuilder_ == null) {
ensureProcessesIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, processes_);
onChanged();
} else {
processesBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* The processes from the specified project and location.
* </pre>
*
* <code>repeated .google.cloud.datacatalog.lineage.v1.Process processes = 1;</code>
*/
public Builder clearProcesses() {
if (processesBuilder_ == null) {
processes_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
processesBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* The processes from the specified project and location.
* </pre>
*
* <code>repeated .google.cloud.datacatalog.lineage.v1.Process processes = 1;</code>
*/
public Builder removeProcesses(int index) {
if (processesBuilder_ == null) {
ensureProcessesIsMutable();
processes_.remove(index);
onChanged();
} else {
processesBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* The processes from the specified project and location.
* </pre>
*
* <code>repeated .google.cloud.datacatalog.lineage.v1.Process processes = 1;</code>
*/
public com.google.cloud.datacatalog.lineage.v1.Process.Builder getProcessesBuilder(int index) {
return getProcessesFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* The processes from the specified project and location.
* </pre>
*
* <code>repeated .google.cloud.datacatalog.lineage.v1.Process processes = 1;</code>
*/
public com.google.cloud.datacatalog.lineage.v1.ProcessOrBuilder getProcessesOrBuilder(
int index) {
if (processesBuilder_ == null) {
return processes_.get(index);
} else {
return processesBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* The processes from the specified project and location.
* </pre>
*
* <code>repeated .google.cloud.datacatalog.lineage.v1.Process processes = 1;</code>
*/
public java.util.List<? extends com.google.cloud.datacatalog.lineage.v1.ProcessOrBuilder>
getProcessesOrBuilderList() {
if (processesBuilder_ != null) {
return processesBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(processes_);
}
}
/**
*
*
* <pre>
* The processes from the specified project and location.
* </pre>
*
* <code>repeated .google.cloud.datacatalog.lineage.v1.Process processes = 1;</code>
*/
public com.google.cloud.datacatalog.lineage.v1.Process.Builder addProcessesBuilder() {
return getProcessesFieldBuilder()
.addBuilder(com.google.cloud.datacatalog.lineage.v1.Process.getDefaultInstance());
}
/**
*
*
* <pre>
* The processes from the specified project and location.
* </pre>
*
* <code>repeated .google.cloud.datacatalog.lineage.v1.Process processes = 1;</code>
*/
public com.google.cloud.datacatalog.lineage.v1.Process.Builder addProcessesBuilder(int index) {
return getProcessesFieldBuilder()
.addBuilder(index, com.google.cloud.datacatalog.lineage.v1.Process.getDefaultInstance());
}
/**
*
*
* <pre>
* The processes from the specified project and location.
* </pre>
*
* <code>repeated .google.cloud.datacatalog.lineage.v1.Process processes = 1;</code>
*/
public java.util.List<com.google.cloud.datacatalog.lineage.v1.Process.Builder>
getProcessesBuilderList() {
return getProcessesFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.datacatalog.lineage.v1.Process,
com.google.cloud.datacatalog.lineage.v1.Process.Builder,
com.google.cloud.datacatalog.lineage.v1.ProcessOrBuilder>
getProcessesFieldBuilder() {
if (processesBuilder_ == null) {
processesBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.datacatalog.lineage.v1.Process,
com.google.cloud.datacatalog.lineage.v1.Process.Builder,
com.google.cloud.datacatalog.lineage.v1.ProcessOrBuilder>(
processes_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
processes_ = null;
}
return processesBuilder_;
}
private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* The token to specify as `page_token` in the next call to get the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* The token to specify as `page_token` in the next call to get the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* The token to specify as `page_token` in the next call to get the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* The token to specify as `page_token` in the next call to get the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* The token to specify as `page_token` in the next call to get the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.datacatalog.lineage.v1.ListProcessesResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.datacatalog.lineage.v1.ListProcessesResponse)
private static final com.google.cloud.datacatalog.lineage.v1.ListProcessesResponse
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.datacatalog.lineage.v1.ListProcessesResponse();
}
public static com.google.cloud.datacatalog.lineage.v1.ListProcessesResponse getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<ListProcessesResponse> PARSER =
new com.google.protobuf.AbstractParser<ListProcessesResponse>() {
@java.lang.Override
public ListProcessesResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ListProcessesResponse> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListProcessesResponse> getParserForType() {
return PARSER;
}
// Instance-level accessor for the shared default instance, required by the Message interface.
@java.lang.Override
public com.google.cloud.datacatalog.lineage.v1.ListProcessesResponse getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
|
apache/kafka | 37,982 | streams/src/main/java/org/apache/kafka/streams/processor/internals/ProcessorStateManager.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.streams.processor.internals;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.utils.FixedOrderMap;
import org.apache.kafka.common.utils.LogContext;
import org.apache.kafka.streams.errors.ProcessorStateException;
import org.apache.kafka.streams.errors.StreamsException;
import org.apache.kafka.streams.errors.TaskCorruptedException;
import org.apache.kafka.streams.errors.TaskMigratedException;
import org.apache.kafka.streams.errors.internals.FailedProcessingException;
import org.apache.kafka.streams.processor.CommitCallback;
import org.apache.kafka.streams.processor.StateRestoreCallback;
import org.apache.kafka.streams.processor.StateRestoreListener;
import org.apache.kafka.streams.processor.StateStore;
import org.apache.kafka.streams.processor.TaskId;
import org.apache.kafka.streams.processor.internals.Task.TaskType;
import org.apache.kafka.streams.state.internals.CachedStateStore;
import org.apache.kafka.streams.state.internals.OffsetCheckpoint;
import org.apache.kafka.streams.state.internals.RecordConverter;
import org.apache.kafka.streams.state.internals.TimeOrderedKeyValueBuffer;
import org.slf4j.Logger;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.OptionalLong;
import java.util.Set;
import java.util.stream.Collectors;
import static java.lang.String.format;
import static org.apache.kafka.streams.processor.internals.StateManagerUtil.CHECKPOINT_FILE_NAME;
import static org.apache.kafka.streams.processor.internals.StateManagerUtil.converterForStore;
import static org.apache.kafka.streams.processor.internals.StateRestoreCallbackAdapter.adapt;
import static org.apache.kafka.streams.state.internals.OffsetCheckpoint.OFFSET_UNKNOWN;
/**
* ProcessorStateManager is the source of truth for the current offset for each state store,
* which is either the read offset during restoring, or the written offset during normal processing.
*
* <p>The offset is initialized as null when the state store is registered, and then it can be updated by
* loading checkpoint file, restore state stores, or passing from the record collector's written offsets.
*
* <p>When checkpointing, if the offset is not null it would be written to the file.
*
* <p>The manager is also responsible for restoring state stores via their registered restore callback,
* which is used for both updating standby tasks as well as restoring active tasks.
*/
public class ProcessorStateManager implements StateManager {
/**
 * Per-store bookkeeping: the store itself, its changelog partition and restore/commit
 * callbacks (non-null only for logged stores), and the current offset snapshot.
 */
public static class StateStoreMetadata {
    private final StateStore stateStore;

    // corresponding changelog partition of the store, this and the following two fields
    // will only be not-null if the state store is logged (i.e. changelog partition and restorer provided)
    private final TopicPartition changelogPartition;

    // could be used for both active restoration and standby
    private final StateRestoreCallback restoreCallback;

    private final CommitCallback commitCallback;

    // record converters used for restoration and standby
    private final RecordConverter recordConverter;

    // indicating the current snapshot of the store as the offset of last changelog record that has been
    // applied to the store used for both restoration (active and standby tasks restored offset) and
    // normal processing that update stores (written offset); could be null (when initialized)
    //
    // the offset is updated in three ways:
    //   1. when loading from the checkpoint file, when the corresponding task has acquired the state
    //      directory lock and have registered all the state store; it is only one-time
    //   2. when updating with restore records (by both restoring active and standby),
    //      update to the last restore record's offset
    //   3. when checkpointing with the given written offsets from record collector,
    //      update blindly with the given offset
    private Long offset;

    // will be updated on batch restored
    private Long endOffset;

    // corrupted state store should not be included in checkpointing
    private boolean corrupted;

    // constructor for non-logged stores: no changelog partition, restore callback, or converter
    private StateStoreMetadata(final StateStore stateStore,
                               final CommitCallback commitCallback) {
        this.stateStore = stateStore;
        this.commitCallback = commitCallback;
        this.restoreCallback = null;
        this.recordConverter = null;
        this.changelogPartition = null;
        this.corrupted = false;
        this.offset = null;
    }

    // constructor for logged stores; a restore callback is mandatory
    private StateStoreMetadata(final StateStore stateStore,
                               final TopicPartition changelogPartition,
                               final StateRestoreCallback restoreCallback,
                               final CommitCallback commitCallback,
                               final RecordConverter recordConverter) {
        if (restoreCallback == null) {
            throw new IllegalStateException("Log enabled store should always provide a restore callback upon registration");
        }

        this.stateStore = stateStore;
        this.changelogPartition = changelogPartition;
        this.restoreCallback = restoreCallback;
        this.commitCallback = commitCallback;
        this.recordConverter = recordConverter;
        this.offset = null;
    }

    private void setOffset(final Long offset) {
        this.offset = offset;
    }

    // the offset is exposed to the changelog reader to determine if restoration is completed
    Long offset() {
        return this.offset;
    }

    Long endOffset() {
        return this.endOffset;
    }

    public void setEndOffset(final Long endOffset) {
        this.endOffset = endOffset;
    }

    TopicPartition changelogPartition() {
        return this.changelogPartition;
    }

    StateStore store() {
        return this.stateStore;
    }

    @Override
    public String toString() {
        // bug fix: the original opened a parenthesis after the class name but never closed it
        return "StateStoreMetadata (" + stateStore.name() + " : " + changelogPartition + " @ " + offset + ")";
    }
}
private static final String STATE_CHANGELOG_TOPIC_SUFFIX = "-changelog";

// log-message prefix; re-assigned when a startup task is handed to a stream thread
private String logPrefix;

private final TaskId taskId;
private final boolean eosEnabled;
// null for startup tasks until assignToStreamThread(...) completes initialization
private ChangelogRegister changelogReader;
private final Collection<TopicPartition> sourcePartitions;
private final Map<String, String> storeToChangelogTopic;

// must be maintained in topological order
private final FixedOrderMap<String, StateStoreMetadata> stores = new FixedOrderMap<>();
private final FixedOrderMap<String, StateStore> globalStores = new FixedOrderMap<>();

private final File baseDir;
private final OffsetCheckpoint checkpointFile;
private final boolean stateUpdaterEnabled;

// may change via transitionTaskType (active <-> standby recycling)
private TaskType taskType;
private Logger log;
private Task.State taskState;
/**
 * Builds the changelog topic name for the given store: {@code <prefix>-[<namedTopology>-]<storeName>-changelog}.
 */
public static String storeChangelogTopic(final String prefix, final String storeName, final String namedTopology) {
    final String topologyInfix = namedTopology == null ? "" : namedTopology + "-";
    return prefix + "-" + topologyInfix + storeName + STATE_CHANGELOG_TOPIC_SUFFIX;
}
/**
 * Fully initialized state manager for a task that is already owned by a stream thread.
 *
 * @throws ProcessorStateException if the task directory does not exist and could not be created
 */
public ProcessorStateManager(final TaskId taskId,
                             final TaskType taskType,
                             final boolean eosEnabled,
                             final LogContext logContext,
                             final StateDirectory stateDirectory,
                             final ChangelogRegister changelogReader,
                             final Map<String, String> storeToChangelogTopic,
                             final Collection<TopicPartition> sourcePartitions,
                             final boolean stateUpdaterEnabled) throws ProcessorStateException {
    this.storeToChangelogTopic = storeToChangelogTopic;
    this.log = logContext.logger(ProcessorStateManager.class);
    this.logPrefix = logContext.logPrefix();
    this.taskId = taskId;
    this.taskType = taskType;
    this.eosEnabled = eosEnabled;
    this.changelogReader = changelogReader;
    this.sourcePartitions = sourcePartitions;
    this.stateUpdaterEnabled = stateUpdaterEnabled;

    // may create the task directory on disk; this is the call that can throw ProcessorStateException
    this.baseDir = stateDirectory.getOrCreateDirectoryForTask(taskId);
    this.checkpointFile = new OffsetCheckpoint(stateDirectory.checkpointFileFor(taskId));

    log.debug("Created state store manager for task {}", taskId);
}
/**
 * Special constructor used by {@link StateDirectory} to partially initialize startup tasks for local state, before
 * they're assigned to a thread. When the task is assigned to a thread, the initialization of this StateManager is
 * completed in {@link #assignToStreamThread(LogContext, ChangelogRegister, Collection)}.
 */
static ProcessorStateManager createStartupTaskStateManager(final TaskId taskId,
                                                           final boolean eosEnabled,
                                                           final LogContext logContext,
                                                           final StateDirectory stateDirectory,
                                                           final Map<String, String> storeToChangelogTopic,
                                                           final Set<TopicPartition> sourcePartitions,
                                                           final boolean stateUpdaterEnabled) {
    // note: changelogReader is deliberately null here; it is supplied later by assignToStreamThread
    return new ProcessorStateManager(taskId, TaskType.STANDBY, eosEnabled, logContext, stateDirectory, null, storeToChangelogTopic, sourcePartitions, stateUpdaterEnabled);
}

/**
 * Standby tasks initialized for local state on-startup are only partially initialized, because they are not yet
 * assigned to a StreamThread. Once assigned to a StreamThread, we complete their initialization here using the
 * assigned StreamThread's context.
 */
void assignToStreamThread(final LogContext logContext,
                          final ChangelogRegister changelogReader,
                          final Collection<TopicPartition> sourcePartitions) {
    if (this.changelogReader != null) {
        throw new IllegalStateException("Attempted to replace an existing changelogReader on a StateManager without closing it.");
    }

    // replace the placeholder source partitions with the thread's actual assignment
    this.sourcePartitions.clear();
    this.log = logContext.logger(ProcessorStateManager.class);
    this.logPrefix = logContext.logPrefix();
    this.changelogReader = changelogReader;
    this.sourcePartitions.addAll(sourcePartitions);
}
// Initializes the task's stores: stores already known from a previous incarnation are
// (re-)registered with the changelog reader, while new stores are init'ed (which in turn
// calls back into registerStore via the store's init).
void registerStateStores(final List<StateStore> allStores, final InternalProcessorContext<?, ?> processorContext) {
    processorContext.uninitialize();
    for (final StateStore store : allStores) {
        if (stores.containsKey(store.name())) {
            if (!stateUpdaterEnabled) {
                // NOTE(review): with the state updater enabled, registration presumably happens
                // elsewhere — confirm against the state-updater code path
                maybeRegisterStoreWithChangelogReader(store.name());
            }
        } else {
            store.init(processorContext, store);
        }
        log.trace("Registered state store {}", store.name());
    }
}
// Registers global stores by name; global stores are tracked separately from task-level stores.
void registerGlobalStateStores(final List<StateStore> stateStores) {
    log.debug("Register global stores {}", stateStores);

    for (final StateStore stateStore : stateStores) {
        globalStores.put(stateStore.name(), stateStore);
    }
}

// Returns the global store registered under the given name, or null if none exists.
@Override
public StateStore globalStore(final String name) {
    return globalStores.get(name);
}
// package-private for test only
// Seeds each registered store's offset from the checkpoint file. Non-logged and in-memory
// stores are skipped; under EOS, a persistent store with local data but no checkpoint entry
// is treated as task corruption (local state may contain uncommitted writes).
void initializeStoreOffsetsFromCheckpoint(final boolean storeDirIsEmpty) {
    try {
        final Map<TopicPartition, Long> loadedCheckpoints = checkpointFile.read();

        log.trace("Loaded offsets from the checkpoint file: {}", loadedCheckpoints);

        for (final StateStoreMetadata store : stores.values()) {
            if (store.corrupted) {
                log.error("Tried to initialize store offsets for corrupted store {}", store);
                throw new IllegalStateException("Should not initialize offsets for a corrupted task");
            }

            if (store.changelogPartition == null) {
                log.info("State store {} is not logged and hence would not be restored", store.stateStore.name());
            } else if (!store.stateStore.persistent()) {
                log.info("Initializing to the starting offset for changelog {} of in-memory state store {}",
                         store.changelogPartition, store.stateStore.name());
            } else if (store.offset() == null) {
                if (loadedCheckpoints.containsKey(store.changelogPartition)) {
                    // entries are removed as they are consumed so leftovers can be reported below
                    final Long offset = changelogOffsetFromCheckpointedOffset(loadedCheckpoints.remove(store.changelogPartition));
                    store.setOffset(offset);

                    log.info("State store {} initialized from checkpoint with offset {} at changelog {}",
                             store.stateStore.name(), store.offset, store.changelogPartition);
                } else {
                    // with EOS, if the previous run did not shutdown gracefully, we may lost the checkpoint file
                    // and hence we are uncertain that the current local state only contains committed data;
                    // in that case we need to treat it as a task-corrupted exception
                    if (eosEnabled && !storeDirIsEmpty) {
                        log.warn("State store {} did not find checkpoint offsets while stores are not empty, " +
                            "since under EOS it has the risk of getting uncommitted data in stores we have to " +
                            "treat it as a task corruption error and wipe out the local state of task {} " +
                            "before re-bootstrapping", store.stateStore.name(), taskId);

                        throw new TaskCorruptedException(Collections.singleton(taskId));
                    } else {
                        log.info("State store {} did not find checkpoint offset, hence would " +
                            "default to the starting offset at changelog {}",
                            store.stateStore.name(), store.changelogPartition);
                    }
                }
            } else {
                // store already carries an offset, e.g. after task recycling; drop its checkpoint entry
                loadedCheckpoints.remove(store.changelogPartition);
                log.debug("Skipping re-initialization of offset from checkpoint for recycled store {}",
                          store.stateStore.name());
            }
        }

        if (!loadedCheckpoints.isEmpty()) {
            log.warn("Some loaded checkpoint offsets cannot find their corresponding state stores: {}", loadedCheckpoints);
        }

        if (eosEnabled) {
            // under EOS the file is deleted after loading so a later crash cannot leave stale offsets
            checkpointFile.delete();
        }
    } catch (final TaskCorruptedException e) {
        throw e;
    } catch (final IOException | RuntimeException e) {
        // both IOException or runtime exception like number parsing can throw
        throw new ProcessorStateException(format("%sError loading and deleting checkpoint file when creating the state manager",
            logPrefix), e);
    }
}
// Registers the store's changelog partition with the changelog reader, but only if the store
// is logged and a reader has been assigned (startup tasks have none until assigned to a thread).
private void maybeRegisterStoreWithChangelogReader(final String storeName) {
    if (isLoggingEnabled(storeName) && changelogReader != null) {
        changelogReader.register(getStorePartition(storeName), this);
    }
}
// Collects the changelog partitions of every registered logged store (non-logged stores
// have a null changelog partition and are skipped).
private List<TopicPartition> getAllChangelogTopicPartitions() {
    return stores.values().stream()
        .map(storeMetadata -> storeMetadata.changelogPartition)
        .filter(partition -> partition != null)
        .collect(Collectors.toList());
}
/** @return the task-level state directory backing all of this manager's stores. */
@Override
public File baseDir() {
    return baseDir;
}
/**
 * Registers a single state store with this manager, validating its name and wiring it to its
 * changelog partition when logging is enabled. Called back from {@code StateStore#init}.
 *
 * @throws IllegalArgumentException if the store name collides with the checkpoint file name
 *                                  or the store was already registered (the store is closed first)
 */
@Override
public void registerStore(final StateStore store,
                          final StateRestoreCallback stateRestoreCallback,
                          final CommitCallback commitCallback) {
    final String storeName = store.name();

    // TODO (KAFKA-12887): we should not trigger user's exception handler for illegal-argument but always
    // fail-crash; in this case we would not need to immediately close the state store before throwing
    if (CHECKPOINT_FILE_NAME.equals(storeName)) {
        store.close();
        throw new IllegalArgumentException(format("%sIllegal store name: %s, which collides with the pre-defined " +
            "checkpoint file name", logPrefix, storeName));
    }

    if (stores.containsKey(storeName)) {
        store.close();
        throw new IllegalArgumentException(format("%sStore %s has already been registered.", logPrefix, storeName));
    }

    if (stateRestoreCallback instanceof StateRestoreListener) {
        log.warn("The registered state restore callback is also implementing the state restore listener interface, " +
            "which is not expected and would be ignored");
    }

    final StateStoreMetadata storeMetadata = isLoggingEnabled(storeName) ?
        new StateStoreMetadata(
            store,
            getStorePartition(storeName),
            stateRestoreCallback,
            commitCallback,
            converterForStore(store)) :
        new StateStoreMetadata(store, commitCallback);

    // register the store first, so that if later an exception is thrown then eventually while we call `close`
    // on the state manager this state store would be closed as well
    stores.put(storeName, storeMetadata);

    if (!stateUpdaterEnabled) {
        maybeRegisterStoreWithChangelogReader(storeName);
    }

    log.debug("Registered state store {} to its state manager", storeName);
}
/**
 * @return the registered store with the given name, or {@code null} if none is registered
 */
@Override
public StateStore store(final String name) {
    final StateStoreMetadata metadata = stores.get(name);
    return metadata == null ? null : metadata.stateStore;
}
// The changelog partitions of all registered logged stores, as an unmodifiable view.
Set<TopicPartition> changelogPartitions() {
    return Collections.unmodifiableSet(changelogOffsets().keySet());
}

// Marks the stores backing the given changelog partitions as corrupted so they are excluded
// from checkpointing; throws if any partition does not belong to this task's stores.
void markChangelogAsCorrupted(final Collection<TopicPartition> partitions) {
    final Collection<TopicPartition> partitionsToMarkAsCorrupted = new LinkedList<>(partitions);
    for (final StateStoreMetadata storeMetadata : stores.values()) {
        if (partitionsToMarkAsCorrupted.contains(storeMetadata.changelogPartition)) {
            storeMetadata.corrupted = true;
            partitionsToMarkAsCorrupted.remove(storeMetadata.changelogPartition);
        }
    }

    if (!partitionsToMarkAsCorrupted.isEmpty()) {
        throw new IllegalStateException("Some partitions " + partitionsToMarkAsCorrupted + " are not contained in " +
            "the store list of task " + taskId + " marking as corrupted, this is not expected");
    }
}
/**
 * @return for each logged store, the next changelog offset to fetch (current offset + 1,
 *         or 0 when the offset is not yet known)
 */
@Override
public Map<TopicPartition, Long> changelogOffsets() {
    // return the current offsets for those logged stores
    final Map<TopicPartition, Long> changelogOffsets = new HashMap<>();
    for (final StateStoreMetadata storeMetadata : stores.values()) {
        if (storeMetadata.changelogPartition != null) {
            // for changelog whose offset is unknown, use 0L indicating earliest offset
            // otherwise return the current offset + 1 as the next offset to fetch
            changelogOffsets.put(
                storeMetadata.changelogPartition,
                storeMetadata.offset == null ? 0L : storeMetadata.offset + 1L);
        }
    }
    return changelogOffsets;
}
TaskId taskId() {
    return taskId;
}

// records the task's current lifecycle state, as reported by the owning task
void transitionTaskState(final Task.State taskState) {
    this.taskState = taskState;
}

Task.State taskState() {
    return taskState;
}

// used by the changelog reader only: whether the changelog is also one of the task's source topics
boolean changelogAsSource(final TopicPartition partition) {
    return sourcePartitions.contains(partition);
}

@Override
public TaskType taskType() {
    return taskType;
}
// used by the changelog reader only: looks up the store metadata registered for the
// given changelog partition, or null if no registered store matches
StateStoreMetadata storeMetadata(final TopicPartition partition) {
    return stores.values().stream()
        .filter(metadata -> partition.equals(metadata.changelogPartition))
        .findFirst()
        .orElse(null);
}
// used by the changelog reader only
// Applies a batch of changelog records to the store via its registered restore callback and
// advances the store's offset to the last record of the batch; also updates the end offset
// when the partition's lag is known.
void restore(final StateStoreMetadata storeMetadata, final List<ConsumerRecord<byte[], byte[]>> restoreRecords, final OptionalLong optionalLag) {
    if (!stores.containsValue(storeMetadata)) {
        throw new IllegalStateException("Restoring " + storeMetadata + " which is not registered in this state manager, " +
            "this should not happen.");
    }

    if (!restoreRecords.isEmpty()) {
        // restore states from changelog records and update the snapshot offset as the batch end record's offset
        final Long batchEndOffset = restoreRecords.get(restoreRecords.size() - 1).offset();
        final RecordBatchingStateRestoreCallback restoreCallback = adapt(storeMetadata.restoreCallback);
        final List<ConsumerRecord<byte[], byte[]>> convertedRecords = restoreRecords.stream()
            .map(storeMetadata.recordConverter::convert)
            .collect(Collectors.toList());

        try {
            restoreCallback.restoreBatch(convertedRecords);
        } catch (final RuntimeException e) {
            throw new ProcessorStateException(
                format("%sException caught while trying to restore state from %s", logPrefix, storeMetadata.changelogPartition),
                e
            );
        }

        storeMetadata.setOffset(batchEndOffset);
        // If null means the lag for this partition is not known yet
        if (optionalLag.isPresent()) {
            // end offset is approximated as current batch-end position plus the remaining lag
            storeMetadata.setEndOffset(optionalLag.getAsLong() + batchEndOffset);
        }
    }
}
/**
 * Flushes every registered store. All stores are attempted even if one fails; only the
 * first exception is rethrown (subsequent ones are logged).
 *
 * @throws TaskMigratedException recoverable error sending changelog records that would cause the task to be removed
 * @throws StreamsException fatal error when flushing the state store, for example sending changelog records failed
 *                          or flushing state store get IO errors; such error should cause the thread to die
 */
@Override
public void flush() {
    RuntimeException firstException = null;
    // attempting to flush the stores
    if (!stores.isEmpty()) {
        log.debug("Flushing all stores registered in the state manager: {}", stores);
        for (final StateStoreMetadata metadata : stores.values()) {
            final StateStore store = metadata.stateStore;
            log.trace("Flushing store {}", store.name());
            try {
                store.flush();
            } catch (final RuntimeException exception) {
                if (firstException == null) {
                    // do NOT wrap the error if it is actually caused by Streams itself
                    // In case of FailedProcessingException Do not keep the failed processing exception in the stack trace
                    if (exception instanceof FailedProcessingException)
                        firstException = new ProcessorStateException(
                            format("%sFailed to flush state store %s", logPrefix, store.name()),
                            exception.getCause());
                    else if (exception instanceof StreamsException)
                        firstException = exception;
                    else
                        firstException = new ProcessorStateException(
                            format("%sFailed to flush state store %s", logPrefix, store.name()), exception);
                    log.error("Failed to flush state store {}: ", store.name(), firstException);
                } else {
                    log.error("Failed to flush state store {}: ", store.name(), exception);
                }
            }
        }
    }

    if (firstException != null) {
        throw firstException;
    }
}
// Flushes the caching layer (or the whole buffer for TimeOrderedKeyValueBuffer stores) of
// every registered store, without flushing the underlying stores themselves. Like flush(),
// all stores are attempted and only the first exception is rethrown.
public void flushCache() {
    RuntimeException firstException = null;
    // attempting to flush the stores
    if (!stores.isEmpty()) {
        log.debug("Flushing all store caches registered in the state manager: {}", stores);
        for (final StateStoreMetadata metadata : stores.values()) {
            final StateStore store = metadata.stateStore;

            try {
                // buffer should be flushed to send all records to changelog
                if (store instanceof TimeOrderedKeyValueBuffer) {
                    store.flush();
                } else if (store instanceof CachedStateStore) {
                    ((CachedStateStore<?, ?>) store).flushCache();
                }
                log.trace("Flushed cache or buffer {}", store.name());
            } catch (final RuntimeException exception) {
                if (firstException == null) {
                    // do NOT wrap the error if it is actually caused by Streams itself
                    // In case of FailedProcessingException Do not keep the failed processing exception in the stack trace
                    if (exception instanceof FailedProcessingException) {
                        firstException = new ProcessorStateException(
                            format("%sFailed to flush cache of store %s", logPrefix, store.name()),
                            exception.getCause());
                    } else if (exception instanceof StreamsException) {
                        firstException = exception;
                    } else {
                        firstException = new ProcessorStateException(
                            format("%sFailed to flush cache of store %s", logPrefix, store.name()),
                            exception
                        );
                    }
                    log.error("Failed to flush cache of store {}: ", store.name(), firstException);
                } else {
                    log.error("Failed to flush cache of store {}: ", store.name(), exception);
                }
            }
        }
    }

    if (firstException != null) {
        throw firstException;
    }
}
/**
 * {@link StateStore#close() Close} all stores (even in case of failure).
 * Log all exceptions and re-throw the first exception that occurred at the end.
 * Also unregisters all changelog partitions from the changelog reader when applicable.
 *
 * @throws ProcessorStateException if any error happens when closing the state stores
 */
@Override
public void close() throws ProcessorStateException {
    log.debug("Closing its state manager and all the registered state stores: {}", stores);

    if (!stateUpdaterEnabled && changelogReader != null) {
        changelogReader.unregister(getAllChangelogTopicPartitions());
    }

    RuntimeException firstException = null;
    // attempting to close the stores, just in case they
    // are not closed by a ProcessorNode yet
    if (!stores.isEmpty()) {
        for (final Map.Entry<String, StateStoreMetadata> entry : stores.entrySet()) {
            final StateStore store = entry.getValue().stateStore;
            log.trace("Closing store {}", store.name());
            try {
                store.close();
            } catch (final RuntimeException exception) {
                if (firstException == null) {
                    // do NOT wrap the error if it is actually caused by Streams itself
                    // In case of FailedProcessingException Do not keep the failed processing exception in the stack trace
                    if (exception instanceof FailedProcessingException)
                        firstException = new ProcessorStateException(
                            format("%sFailed to close state store %s", logPrefix, store.name()),
                            exception.getCause());
                    else if (exception instanceof StreamsException)
                        firstException = exception;
                    else
                        firstException = new ProcessorStateException(
                            format("%sFailed to close state store %s", logPrefix, store.name()), exception);
                    log.error("Failed to close state store {}: ", store.name(), firstException);
                } else {
                    log.error("Failed to close state store {}: ", store.name(), exception);
                }
            }
        }

        stores.clear();
    }

    if (firstException != null) {
        throw firstException;
    }
}
/**
 * Alternative to {@link #close()} that just resets the changelogs without closing any of the underlying state
 * or unregistering the stores themselves
 */
void recycle() {
    log.debug("Recycling state for {} task {}.", taskType, taskId);

    if (!stateUpdaterEnabled && changelogReader != null) {
        final List<TopicPartition> allChangelogs = getAllChangelogTopicPartitions();
        changelogReader.unregister(allChangelogs);
    }

    // when the state manager is recycled to be used, future writes may bypass its store's caching
    // layer if they are from restoration, hence we need to clear the state store's caches just in case
    // See KAFKA-14172 for details
    if (!stores.isEmpty()) {
        log.debug("Clearing all store caches registered in the state manager: {}", stores);
        for (final StateStoreMetadata metadata : stores.values()) {
            final StateStore store = metadata.stateStore;

            if (store instanceof CachedStateStore) {
                ((CachedStateStore<?, ?>) store).clearCache();
            }
            log.trace("Cleared cache {}", store.name());
        }
    }
}
// Switches the task type (active <-> standby) as part of task recycling, and swaps in the
// new context's logger; converting to the same type is a programming error.
void transitionTaskType(final TaskType newType, final LogContext logContext) {
    if (taskType.equals(newType)) {
        throw new IllegalStateException("Tried to recycle state for task type conversion but new type was the same.");
    }

    taskType = newType;
    log = logContext.logger(ProcessorStateManager.class);
}
/**
 * Blindly overwrites each matching store's offset with the offset written by the record
 * collector; partitions with no matching registered store are ignored.
 */
@Override
public void updateChangelogOffsets(final Map<TopicPartition, Long> writtenOffsets) {
    writtenOffsets.forEach((partition, writtenOffset) -> {
        final StateStoreMetadata store = findStore(partition);
        if (store != null) {
            store.setOffset(writtenOffset);

            log.debug("State store {} updated to written offset {} at changelog {}",
                store.stateStore.name(), store.offset, store.changelogPartition);
        }
    });
}
/**
 * Invokes each store's commit callback and then writes the checkpoint file containing the
 * current offsets of all logged, persistent, non-corrupted stores. A failure to write the
 * file is logged but not rethrown.
 */
@Override
public void checkpoint() {
    // checkpoint those stores that are only logged and persistent to the checkpoint file
    final Map<TopicPartition, Long> checkpointingOffsets = new HashMap<>();
    for (final StateStoreMetadata storeMetadata : stores.values()) {
        if (storeMetadata.commitCallback != null && !storeMetadata.corrupted) {
            try {
                storeMetadata.commitCallback.onCommit();
            } catch (final IOException e) {
                throw new ProcessorStateException(
                        format("%sException caught while trying to checkpoint store, " +
                                "changelog partition %s", logPrefix, storeMetadata.changelogPartition),
                        e
                );
            }
        }

        // store is logged, persistent, not corrupted, and has a valid current offset
        if (storeMetadata.changelogPartition != null &&
            storeMetadata.stateStore.persistent() &&
            !storeMetadata.corrupted) {

            final long checkpointableOffset = checkpointableOffsetFromChangelogOffset(storeMetadata.offset);
            checkpointingOffsets.put(storeMetadata.changelogPartition, checkpointableOffset);
        }
    }

    log.debug("Writing checkpoint: {} for task {}", checkpointingOffsets, taskId);
    try {
        checkpointFile.write(checkpointingOffsets);
    } catch (final IOException e) {
        log.warn("Failed to write offset checkpoint file to [{}]." +
            " This may occur if OS cleaned the state.dir in case when it located in ${java.io.tmpdir} directory." +
            " This may also occur due to running multiple instances on the same machine using the same state dir." +
            " Changing the location of state.dir may resolve the problem.",
            checkpointFile, e);
    }
}
// Derives the store's changelog TopicPartition from the task id's partition number.
private TopicPartition getStorePartition(final String storeName) {
    // NOTE we assume the partition of the topic can always be inferred from the task id;
    // if user ever use a custom partition grouper (deprecated in KIP-528) this would break and
    // it is not a regression (it would always break anyways)
    return new TopicPartition(changelogFor(storeName), taskId.partition());
}

// Whether the store has a changelog topic, i.e. change-logging is enabled for it.
private boolean isLoggingEnabled(final String storeName) {
    // if the store name does not exist in the changelog map, it means the underlying store
    // is not log enabled (including global stores)
    return changelogFor(storeName) != null;
}
// Finds the unique store registered for the given changelog partition, or null if none;
// more than one match indicates an internal invariant violation.
private StateStoreMetadata findStore(final TopicPartition changelogPartition) {
    final List<StateStoreMetadata> found = new ArrayList<>();
    for (final StateStoreMetadata metadata : stores.values()) {
        if (changelogPartition.equals(metadata.changelogPartition)) {
            found.add(metadata);
        }
    }

    if (found.size() > 1) {
        throw new IllegalStateException("Multiple state stores are found for changelog partition " + changelogPartition +
            ", this should never happen: " + found);
    }

    return found.isEmpty() ? null : found.get(0);
}
// Pass in a sentinel value to checkpoint when the changelog offset is not yet initialized/known
private long checkpointableOffsetFromChangelogOffset(final Long offset) {
    if (offset == null) {
        return OFFSET_UNKNOWN;
    }
    return offset;
}

// Convert the written offsets in the checkpoint file back to the changelog offset
private Long changelogOffsetFromCheckpointedOffset(final long offset) {
    if (offset == OFFSET_UNKNOWN) {
        return null;
    }
    return offset;
}
/**
 * @return the changelog partition registered for the given store
 * @throws IllegalStateException if the store is not registered or has no changelog partition
 *                               (e.g. logging disabled)
 */
public TopicPartition registeredChangelogPartitionFor(final String storeName) {
    final StateStoreMetadata storeMetadata = stores.get(storeName);
    if (storeMetadata == null) {
        throw new IllegalStateException("State store " + storeName
            + " for which the registered changelog partition should be"
            + " retrieved has not been registered"
        );
    }
    if (storeMetadata.changelogPartition == null) {
        throw new IllegalStateException("Registered state store " + storeName
            + " does not have a registered changelog partition."
            + " This may happen if logging is disabled for the state store."
        );
    }
    return storeMetadata.changelogPartition;
}

// Maps a store name to its changelog topic name, or null if the store is not logged.
public String changelogFor(final String storeName) {
    return storeToChangelogTopic.get(storeName);
}
/**
 * Deletes the checkpoint file, but only when exactly-once semantics is enabled.
 */
public void deleteCheckPointFileIfEOSEnabled() throws IOException {
    if (!eosEnabled) {
        return;
    }
    checkpointFile.delete();
}
}
|
googleapis/google-cloud-java | 37,634 | java-apihub/proto-google-cloud-apihub-v1/src/main/java/com/google/cloud/apihub/v1/DependencyEntityReference.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/apihub/v1/common_fields.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.apihub.v1;
/**
*
*
* <pre>
* Reference to an entity participating in a dependency.
* </pre>
*
* Protobuf type {@code google.cloud.apihub.v1.DependencyEntityReference}
*/
public final class DependencyEntityReference extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.apihub.v1.DependencyEntityReference)
DependencyEntityReferenceOrBuilder {
private static final long serialVersionUID = 0L;

// Use DependencyEntityReference.newBuilder() to construct.
private DependencyEntityReference(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}

// Default constructor initializes fields to their proto3 defaults (empty string).
private DependencyEntityReference() {
  displayName_ = "";
}

// Used by the protobuf runtime to create new instances without reflection overhead.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  return new DependencyEntityReference();
}
// Static descriptor accessor generated from common_fields.proto.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.apihub.v1.CommonFieldsProto
      .internal_static_google_cloud_apihub_v1_DependencyEntityReference_descriptor;
}

// Binds the generated field accessors to this message and its builder.
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.cloud.apihub.v1.CommonFieldsProto
      .internal_static_google_cloud_apihub_v1_DependencyEntityReference_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.apihub.v1.DependencyEntityReference.class,
          com.google.cloud.apihub.v1.DependencyEntityReference.Builder.class);
}
private int identifierCase_ = 0;
@SuppressWarnings("serial")
private java.lang.Object identifier_;
public enum IdentifierCase
implements
com.google.protobuf.Internal.EnumLite,
com.google.protobuf.AbstractMessage.InternalOneOfEnum {
OPERATION_RESOURCE_NAME(2),
EXTERNAL_API_RESOURCE_NAME(3),
IDENTIFIER_NOT_SET(0);
private final int value;
private IdentifierCase(int value) {
this.value = value;
}
/**
* @param value The number of the enum to look for.
* @return The enum associated with the given number.
* @deprecated Use {@link #forNumber(int)} instead.
*/
@java.lang.Deprecated
public static IdentifierCase valueOf(int value) {
return forNumber(value);
}
public static IdentifierCase forNumber(int value) {
switch (value) {
case 2:
return OPERATION_RESOURCE_NAME;
case 3:
return EXTERNAL_API_RESOURCE_NAME;
case 0:
return IDENTIFIER_NOT_SET;
default:
return null;
}
}
public int getNumber() {
return this.value;
}
};
public IdentifierCase getIdentifierCase() {
return IdentifierCase.forNumber(identifierCase_);
}
public static final int OPERATION_RESOURCE_NAME_FIELD_NUMBER = 2;
/**
*
*
* <pre>
* The resource name of an operation in the API Hub.
*
* Format:
* `projects/{project}/locations/{location}/apis/{api}/versions/{version}/operations/{operation}`
* </pre>
*
* <code>string operation_resource_name = 2;</code>
*
* @return Whether the operationResourceName field is set.
*/
public boolean hasOperationResourceName() {
return identifierCase_ == 2;
}
/**
*
*
* <pre>
* The resource name of an operation in the API Hub.
*
* Format:
* `projects/{project}/locations/{location}/apis/{api}/versions/{version}/operations/{operation}`
* </pre>
*
* <code>string operation_resource_name = 2;</code>
*
* @return The operationResourceName.
*/
public java.lang.String getOperationResourceName() {
java.lang.Object ref = "";
if (identifierCase_ == 2) {
ref = identifier_;
}
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (identifierCase_ == 2) {
identifier_ = s;
}
return s;
}
}
/**
*
*
* <pre>
* The resource name of an operation in the API Hub.
*
* Format:
* `projects/{project}/locations/{location}/apis/{api}/versions/{version}/operations/{operation}`
* </pre>
*
* <code>string operation_resource_name = 2;</code>
*
* @return The bytes for operationResourceName.
*/
public com.google.protobuf.ByteString getOperationResourceNameBytes() {
java.lang.Object ref = "";
if (identifierCase_ == 2) {
ref = identifier_;
}
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
if (identifierCase_ == 2) {
identifier_ = b;
}
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int EXTERNAL_API_RESOURCE_NAME_FIELD_NUMBER = 3;
/**
*
*
* <pre>
* The resource name of an external API in the API Hub.
*
* Format:
* `projects/{project}/locations/{location}/externalApis/{external_api}`
* </pre>
*
* <code>string external_api_resource_name = 3;</code>
*
* @return Whether the externalApiResourceName field is set.
*/
public boolean hasExternalApiResourceName() {
return identifierCase_ == 3;
}
/**
*
*
* <pre>
* The resource name of an external API in the API Hub.
*
* Format:
* `projects/{project}/locations/{location}/externalApis/{external_api}`
* </pre>
*
* <code>string external_api_resource_name = 3;</code>
*
* @return The externalApiResourceName.
*/
public java.lang.String getExternalApiResourceName() {
java.lang.Object ref = "";
if (identifierCase_ == 3) {
ref = identifier_;
}
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (identifierCase_ == 3) {
identifier_ = s;
}
return s;
}
}
/**
*
*
* <pre>
* The resource name of an external API in the API Hub.
*
* Format:
* `projects/{project}/locations/{location}/externalApis/{external_api}`
* </pre>
*
* <code>string external_api_resource_name = 3;</code>
*
* @return The bytes for externalApiResourceName.
*/
public com.google.protobuf.ByteString getExternalApiResourceNameBytes() {
java.lang.Object ref = "";
if (identifierCase_ == 3) {
ref = identifier_;
}
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
if (identifierCase_ == 3) {
identifier_ = b;
}
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int DISPLAY_NAME_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object displayName_ = "";
/**
*
*
* <pre>
* Output only. Display name of the entity.
* </pre>
*
* <code>string display_name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @return The displayName.
*/
@java.lang.Override
public java.lang.String getDisplayName() {
java.lang.Object ref = displayName_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
displayName_ = s;
return s;
}
}
/**
*
*
* <pre>
* Output only. Display name of the entity.
* </pre>
*
* <code>string display_name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @return The bytes for displayName.
*/
@java.lang.Override
public com.google.protobuf.ByteString getDisplayNameBytes() {
java.lang.Object ref = displayName_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
displayName_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(displayName_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, displayName_);
}
if (identifierCase_ == 2) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, identifier_);
}
if (identifierCase_ == 3) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 3, identifier_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(displayName_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, displayName_);
}
if (identifierCase_ == 2) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, identifier_);
}
if (identifierCase_ == 3) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, identifier_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.apihub.v1.DependencyEntityReference)) {
return super.equals(obj);
}
com.google.cloud.apihub.v1.DependencyEntityReference other =
(com.google.cloud.apihub.v1.DependencyEntityReference) obj;
if (!getDisplayName().equals(other.getDisplayName())) return false;
if (!getIdentifierCase().equals(other.getIdentifierCase())) return false;
switch (identifierCase_) {
case 2:
if (!getOperationResourceName().equals(other.getOperationResourceName())) return false;
break;
case 3:
if (!getExternalApiResourceName().equals(other.getExternalApiResourceName())) return false;
break;
case 0:
default:
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + DISPLAY_NAME_FIELD_NUMBER;
hash = (53 * hash) + getDisplayName().hashCode();
switch (identifierCase_) {
case 2:
hash = (37 * hash) + OPERATION_RESOURCE_NAME_FIELD_NUMBER;
hash = (53 * hash) + getOperationResourceName().hashCode();
break;
case 3:
hash = (37 * hash) + EXTERNAL_API_RESOURCE_NAME_FIELD_NUMBER;
hash = (53 * hash) + getExternalApiResourceName().hashCode();
break;
case 0:
default:
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.apihub.v1.DependencyEntityReference parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.apihub.v1.DependencyEntityReference parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.apihub.v1.DependencyEntityReference parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.apihub.v1.DependencyEntityReference parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.apihub.v1.DependencyEntityReference parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.apihub.v1.DependencyEntityReference parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.apihub.v1.DependencyEntityReference parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.apihub.v1.DependencyEntityReference parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.apihub.v1.DependencyEntityReference parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.apihub.v1.DependencyEntityReference parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.apihub.v1.DependencyEntityReference parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.apihub.v1.DependencyEntityReference parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.apihub.v1.DependencyEntityReference prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Reference to an entity participating in a dependency.
* </pre>
*
* Protobuf type {@code google.cloud.apihub.v1.DependencyEntityReference}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.apihub.v1.DependencyEntityReference)
com.google.cloud.apihub.v1.DependencyEntityReferenceOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.apihub.v1.CommonFieldsProto
.internal_static_google_cloud_apihub_v1_DependencyEntityReference_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.apihub.v1.CommonFieldsProto
.internal_static_google_cloud_apihub_v1_DependencyEntityReference_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.apihub.v1.DependencyEntityReference.class,
com.google.cloud.apihub.v1.DependencyEntityReference.Builder.class);
}
// Construct using com.google.cloud.apihub.v1.DependencyEntityReference.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
displayName_ = "";
identifierCase_ = 0;
identifier_ = null;
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.apihub.v1.CommonFieldsProto
.internal_static_google_cloud_apihub_v1_DependencyEntityReference_descriptor;
}
@java.lang.Override
public com.google.cloud.apihub.v1.DependencyEntityReference getDefaultInstanceForType() {
return com.google.cloud.apihub.v1.DependencyEntityReference.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.apihub.v1.DependencyEntityReference build() {
com.google.cloud.apihub.v1.DependencyEntityReference result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.apihub.v1.DependencyEntityReference buildPartial() {
com.google.cloud.apihub.v1.DependencyEntityReference result =
new com.google.cloud.apihub.v1.DependencyEntityReference(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
buildPartialOneofs(result);
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.apihub.v1.DependencyEntityReference result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000004) != 0)) {
result.displayName_ = displayName_;
}
}
private void buildPartialOneofs(com.google.cloud.apihub.v1.DependencyEntityReference result) {
result.identifierCase_ = identifierCase_;
result.identifier_ = this.identifier_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.apihub.v1.DependencyEntityReference) {
return mergeFrom((com.google.cloud.apihub.v1.DependencyEntityReference) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.apihub.v1.DependencyEntityReference other) {
if (other == com.google.cloud.apihub.v1.DependencyEntityReference.getDefaultInstance())
return this;
if (!other.getDisplayName().isEmpty()) {
displayName_ = other.displayName_;
bitField0_ |= 0x00000004;
onChanged();
}
switch (other.getIdentifierCase()) {
case OPERATION_RESOURCE_NAME:
{
identifierCase_ = 2;
identifier_ = other.identifier_;
onChanged();
break;
}
case EXTERNAL_API_RESOURCE_NAME:
{
identifierCase_ = 3;
identifier_ = other.identifier_;
onChanged();
break;
}
case IDENTIFIER_NOT_SET:
{
break;
}
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
displayName_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000004;
break;
} // case 10
case 18:
{
java.lang.String s = input.readStringRequireUtf8();
identifierCase_ = 2;
identifier_ = s;
break;
} // case 18
case 26:
{
java.lang.String s = input.readStringRequireUtf8();
identifierCase_ = 3;
identifier_ = s;
break;
} // case 26
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int identifierCase_ = 0;
private java.lang.Object identifier_;
public IdentifierCase getIdentifierCase() {
return IdentifierCase.forNumber(identifierCase_);
}
public Builder clearIdentifier() {
identifierCase_ = 0;
identifier_ = null;
onChanged();
return this;
}
private int bitField0_;
/**
*
*
* <pre>
* The resource name of an operation in the API Hub.
*
* Format:
* `projects/{project}/locations/{location}/apis/{api}/versions/{version}/operations/{operation}`
* </pre>
*
* <code>string operation_resource_name = 2;</code>
*
* @return Whether the operationResourceName field is set.
*/
@java.lang.Override
public boolean hasOperationResourceName() {
return identifierCase_ == 2;
}
/**
*
*
* <pre>
* The resource name of an operation in the API Hub.
*
* Format:
* `projects/{project}/locations/{location}/apis/{api}/versions/{version}/operations/{operation}`
* </pre>
*
* <code>string operation_resource_name = 2;</code>
*
* @return The operationResourceName.
*/
@java.lang.Override
public java.lang.String getOperationResourceName() {
java.lang.Object ref = "";
if (identifierCase_ == 2) {
ref = identifier_;
}
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (identifierCase_ == 2) {
identifier_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* The resource name of an operation in the API Hub.
*
* Format:
* `projects/{project}/locations/{location}/apis/{api}/versions/{version}/operations/{operation}`
* </pre>
*
* <code>string operation_resource_name = 2;</code>
*
* @return The bytes for operationResourceName.
*/
@java.lang.Override
public com.google.protobuf.ByteString getOperationResourceNameBytes() {
java.lang.Object ref = "";
if (identifierCase_ == 2) {
ref = identifier_;
}
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
if (identifierCase_ == 2) {
identifier_ = b;
}
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* The resource name of an operation in the API Hub.
*
* Format:
* `projects/{project}/locations/{location}/apis/{api}/versions/{version}/operations/{operation}`
* </pre>
*
* <code>string operation_resource_name = 2;</code>
*
* @param value The operationResourceName to set.
* @return This builder for chaining.
*/
public Builder setOperationResourceName(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
identifierCase_ = 2;
identifier_ = value;
onChanged();
return this;
}
/**
*
*
* <pre>
* The resource name of an operation in the API Hub.
*
* Format:
* `projects/{project}/locations/{location}/apis/{api}/versions/{version}/operations/{operation}`
* </pre>
*
* <code>string operation_resource_name = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearOperationResourceName() {
if (identifierCase_ == 2) {
identifierCase_ = 0;
identifier_ = null;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* The resource name of an operation in the API Hub.
*
* Format:
* `projects/{project}/locations/{location}/apis/{api}/versions/{version}/operations/{operation}`
* </pre>
*
* <code>string operation_resource_name = 2;</code>
*
* @param value The bytes for operationResourceName to set.
* @return This builder for chaining.
*/
public Builder setOperationResourceNameBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
identifierCase_ = 2;
identifier_ = value;
onChanged();
return this;
}
/**
*
*
* <pre>
* The resource name of an external API in the API Hub.
*
* Format:
* `projects/{project}/locations/{location}/externalApis/{external_api}`
* </pre>
*
* <code>string external_api_resource_name = 3;</code>
*
* @return Whether the externalApiResourceName field is set.
*/
@java.lang.Override
public boolean hasExternalApiResourceName() {
return identifierCase_ == 3;
}
/**
*
*
* <pre>
* The resource name of an external API in the API Hub.
*
* Format:
* `projects/{project}/locations/{location}/externalApis/{external_api}`
* </pre>
*
* <code>string external_api_resource_name = 3;</code>
*
* @return The externalApiResourceName.
*/
@java.lang.Override
public java.lang.String getExternalApiResourceName() {
java.lang.Object ref = "";
if (identifierCase_ == 3) {
ref = identifier_;
}
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (identifierCase_ == 3) {
identifier_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* The resource name of an external API in the API Hub.
*
* Format:
* `projects/{project}/locations/{location}/externalApis/{external_api}`
* </pre>
*
* <code>string external_api_resource_name = 3;</code>
*
* @return The bytes for externalApiResourceName.
*/
@java.lang.Override
public com.google.protobuf.ByteString getExternalApiResourceNameBytes() {
java.lang.Object ref = "";
if (identifierCase_ == 3) {
ref = identifier_;
}
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
if (identifierCase_ == 3) {
identifier_ = b;
}
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* The resource name of an external API in the API Hub.
*
* Format:
* `projects/{project}/locations/{location}/externalApis/{external_api}`
* </pre>
*
* <code>string external_api_resource_name = 3;</code>
*
* @param value The externalApiResourceName to set.
* @return This builder for chaining.
*/
public Builder setExternalApiResourceName(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
identifierCase_ = 3;
identifier_ = value;
onChanged();
return this;
}
/**
*
*
* <pre>
* The resource name of an external API in the API Hub.
*
* Format:
* `projects/{project}/locations/{location}/externalApis/{external_api}`
* </pre>
*
* <code>string external_api_resource_name = 3;</code>
*
* @return This builder for chaining.
*/
public Builder clearExternalApiResourceName() {
if (identifierCase_ == 3) {
identifierCase_ = 0;
identifier_ = null;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* The resource name of an external API in the API Hub.
*
* Format:
* `projects/{project}/locations/{location}/externalApis/{external_api}`
* </pre>
*
* <code>string external_api_resource_name = 3;</code>
*
* @param value The bytes for externalApiResourceName to set.
* @return This builder for chaining.
*/
public Builder setExternalApiResourceNameBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
identifierCase_ = 3;
identifier_ = value;
onChanged();
return this;
}
private java.lang.Object displayName_ = "";
/**
*
*
* <pre>
* Output only. Display name of the entity.
* </pre>
*
* <code>string display_name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @return The displayName.
*/
public java.lang.String getDisplayName() {
java.lang.Object ref = displayName_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
displayName_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Output only. Display name of the entity.
* </pre>
*
* <code>string display_name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @return The bytes for displayName.
*/
public com.google.protobuf.ByteString getDisplayNameBytes() {
java.lang.Object ref = displayName_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
displayName_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Output only. Display name of the entity.
* </pre>
*
* <code>string display_name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @param value The displayName to set.
* @return This builder for chaining.
*/
public Builder setDisplayName(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
displayName_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Output only. Display name of the entity.
* </pre>
*
* <code>string display_name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @return This builder for chaining.
*/
public Builder clearDisplayName() {
displayName_ = getDefaultInstance().getDisplayName();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
*
*
* <pre>
* Output only. Display name of the entity.
* </pre>
*
* <code>string display_name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @param value The bytes for displayName to set.
* @return This builder for chaining.
*/
public Builder setDisplayNameBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
displayName_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.apihub.v1.DependencyEntityReference)
}
// @@protoc_insertion_point(class_scope:google.cloud.apihub.v1.DependencyEntityReference)
private static final com.google.cloud.apihub.v1.DependencyEntityReference DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.apihub.v1.DependencyEntityReference();
}
public static com.google.cloud.apihub.v1.DependencyEntityReference getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<DependencyEntityReference> PARSER =
new com.google.protobuf.AbstractParser<DependencyEntityReference>() {
@java.lang.Override
public DependencyEntityReference parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<DependencyEntityReference> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<DependencyEntityReference> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.apihub.v1.DependencyEntityReference getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
apache/incubator-tuweni | 37,548 | units/src/test/java/org/apache/tuweni/units/bigints/BaseUInt384ValueTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE
* file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file
* to You under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the
* License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package org.apache.tuweni.units.bigints;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;
import org.apache.tuweni.bytes.Bytes;
import java.math.BigInteger;
import java.util.stream.Stream;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;
// This test is in a `test` sub-package to ensure that it does not have access to package-private
// methods within the bigints package, as it should be testing the usage of the public API.
class BaseUInt384ValueTest {
private static class Value extends BaseUInt384Value<Value> {
static final Value MAX_VALUE = new Value(UInt384.MAX_VALUE);
Value(UInt384 v) {
super(v, Value::new);
}
Value(long v) {
super(v, Value::new);
}
Value(BigInteger s) {
super(s, Value::new);
}
}
private static Value v(long v) {
return new Value(v);
}
private static Value biv(String s) {
return new Value(new BigInteger(s));
}
private static Value hv(String s) {
return new Value(UInt384.fromHexString(s));
}
@ParameterizedTest
@MethodSource("addProvider")
void add(Value v1, Value v2, Value expected) {
assertValueEquals(expected, v1.add(v2));
}
@SuppressWarnings("UnusedMethod")
private static Stream<Arguments> addProvider() {
return Stream
.of(
Arguments.of(v(1), v(0), v(1)),
Arguments.of(v(5), v(0), v(5)),
Arguments.of(v(0), v(1), v(1)),
Arguments.of(v(0), v(100), v(100)),
Arguments.of(v(2), v(2), v(4)),
Arguments.of(v(100), v(90), v(190)),
Arguments.of(biv("9223372036854775807"), v(1), biv("9223372036854775808")),
Arguments.of(biv("13492324908428420834234908342"), v(10), biv("13492324908428420834234908352")),
Arguments
.of(biv("13492324908428420834234908342"), v(23422141424214L), biv("13492324908428444256376332556")),
Arguments.of(new Value(UInt384.MAX_VALUE), v(1), v(0)),
Arguments.of(new Value(UInt384.MAX_VALUE), v(2), v(1)),
Arguments
.of(
hv(
"0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF0"),
v(1),
hv(
"0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF1")),
Arguments
.of(
hv(
"0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFE"),
v(1),
new Value(UInt384.MAX_VALUE)));
}
@ParameterizedTest
@MethodSource("addLongProvider")
void addLong(Value v1, long v2, Value expected) {
assertValueEquals(expected, v1.add(v2));
}
@SuppressWarnings("UnusedMethod")
private static Stream<Arguments> addLongProvider() {
return Stream
.of(
Arguments.of(v(1), 0L, v(1)),
Arguments.of(v(5), 0L, v(5)),
Arguments.of(v(0), 1L, v(1)),
Arguments.of(v(0), 100L, v(100)),
Arguments.of(v(2), 2L, v(4)),
Arguments.of(v(100), 90L, v(190)),
Arguments.of(biv("13492324908428420834234908342"), 10L, biv("13492324908428420834234908352")),
Arguments.of(biv("13492324908428420834234908342"), 23422141424214L, biv("13492324908428444256376332556")),
Arguments.of(new Value(UInt384.MAX_VALUE), 1L, v(0)),
Arguments.of(new Value(UInt384.MAX_VALUE), 2L, v(1)),
Arguments
.of(
hv("0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF0"),
1L,
hv("0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF1")),
Arguments
.of(
hv(
"0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFE"),
1L,
new Value(UInt384.MAX_VALUE)),
Arguments.of(v(10), -5L, v(5)),
Arguments.of(v(0), -1L, new Value(UInt384.MAX_VALUE)));
}
@ParameterizedTest
@MethodSource("addModProvider")
void addMod(Value v1, Value v2, UInt384 m, Value expected) {
assertValueEquals(expected, v1.addMod(v2, m));
}
@SuppressWarnings("UnusedMethod")
private static Stream<Arguments> addModProvider() {
return Stream
.of(
Arguments.of(v(0), v(1), UInt384.valueOf(2), v(1)),
Arguments.of(v(1), v(1), UInt384.valueOf(2), v(0)),
Arguments
.of(
new Value(UInt384.MAX_VALUE.subtract(2)),
v(1),
UInt384.MAX_VALUE,
new Value(UInt384.MAX_VALUE.subtract(1))),
Arguments.of(new Value(UInt384.MAX_VALUE.subtract(1)), v(1), UInt384.MAX_VALUE, v(0)),
Arguments.of(v(2), v(1), UInt384.valueOf(2), v(1)),
Arguments.of(v(3), v(2), UInt384.valueOf(6), v(5)),
Arguments.of(v(3), v(4), UInt384.valueOf(2), v(1)));
}
@Test
void shouldThrowForAddModOfZero() {
Throwable exception = assertThrows(ArithmeticException.class, () -> v(0).addMod(v(1), UInt384.ZERO));
assertEquals("addMod with zero modulus", exception.getMessage());
}
@ParameterizedTest
@MethodSource("addModUInt384UInt384Provider")
void addModUInt384UInt384(Value v1, Value v2, UInt384 m, Value expected) {
assertValueEquals(expected, v1.addMod(v2, m));
}
@SuppressWarnings("UnusedMethod")
private static Stream<Arguments> addModUInt384UInt384Provider() {
return Stream
.of(
Arguments.of(v(0), v(1), UInt384.valueOf(2), v(1)),
Arguments.of(v(1), v(1), UInt384.valueOf(2), v(0)),
Arguments
.of(
new Value(UInt384.MAX_VALUE.subtract(2)),
v(1),
UInt384.MAX_VALUE,
new Value(UInt384.MAX_VALUE.subtract(1))),
Arguments.of(new Value(UInt384.MAX_VALUE.subtract(1)), new Value(UInt384.ONE), UInt384.MAX_VALUE, v(0)),
Arguments.of(v(2), new Value(UInt384.ONE), UInt384.valueOf(2), v(1)),
Arguments.of(v(3), v(2), UInt384.valueOf(6), v(5)),
Arguments.of(v(3), v(4), UInt384.valueOf(2), v(1)));
}
@Test
void shouldThrowForAddModLongUInt384OfZero() {
Throwable exception = assertThrows(ArithmeticException.class, () -> v(0).addMod(1, UInt384.ZERO));
assertEquals("addMod with zero modulus", exception.getMessage());
}
@ParameterizedTest
@MethodSource("addModLongUInt384Provider")
void addModLongUInt384(Value v1, long v2, UInt384 m, Value expected) {
assertValueEquals(expected, v1.addMod(v2, m));
}
@SuppressWarnings("UnusedMethod")
private static Stream<Arguments> addModLongUInt384Provider() {
return Stream
.of(
Arguments.of(v(0), 1L, UInt384.valueOf(2), v(1)),
Arguments.of(v(1), 1L, UInt384.valueOf(2), v(0)),
Arguments
.of(
new Value(UInt384.MAX_VALUE.subtract(2)),
1L,
UInt384.MAX_VALUE,
new Value(UInt384.MAX_VALUE.subtract(1))),
Arguments.of(new Value(UInt384.MAX_VALUE.subtract(1)), 1L, UInt384.MAX_VALUE, v(0)),
Arguments.of(v(2), 1L, UInt384.valueOf(2), v(1)),
Arguments.of(v(2), -1L, UInt384.valueOf(2), v(1)),
Arguments.of(v(1), -7L, UInt384.valueOf(5), v(4)));
}
@Test
void shouldThrowForAddModUInt384UInt384OfZero() {
Throwable exception = assertThrows(ArithmeticException.class, () -> v(0).addMod(UInt384.ONE, UInt384.ZERO));
assertEquals("addMod with zero modulus", exception.getMessage());
}
@ParameterizedTest
@MethodSource("addModLongLongProvider")
void addModLongLong(Value v1, long v2, long m, Value expected) {
assertValueEquals(expected, v1.addMod(v2, m));
}
@SuppressWarnings("UnusedMethod")
private static Stream<Arguments> addModLongLongProvider() {
return Stream
.of(Arguments.of(v(0), 1L, 2L, v(1)), Arguments.of(v(1), 1L, 2L, v(0)), Arguments.of(v(2), 1L, 2L, v(1)));
}
@Test
void shouldThrowForAddModLongLongOfZero() {
Throwable exception = assertThrows(ArithmeticException.class, () -> v(0).addMod(1, 0));
assertEquals("addMod with zero modulus", exception.getMessage());
}
@Test
void shouldThrowForAddModLongLongOfNegative() {
Throwable exception = assertThrows(ArithmeticException.class, () -> v(0).addMod(1, -5));
assertEquals("addMod unsigned with negative modulus", exception.getMessage());
}
@ParameterizedTest
@MethodSource("subtractProvider")
void subtract(Value v1, Value v2, Value expected) {
assertValueEquals(expected, v1.subtract(v2));
}
@SuppressWarnings("UnusedMethod")
private static Stream<Arguments> subtractProvider() {
return Stream
.of(
Arguments.of(v(1), v(0), v(1)),
Arguments.of(v(5), v(0), v(5)),
Arguments.of(v(2), v(1), v(1)),
Arguments.of(v(100), v(100), v(0)),
Arguments.of(biv("13492324908428420834234908342"), v(10), biv("13492324908428420834234908332")),
Arguments
.of(biv("13492324908428420834234908342"), v(23422141424214L), biv("13492324908428397412093484128")),
Arguments.of(v(0), v(1), new Value(UInt384.MAX_VALUE)),
Arguments
.of(
v(1),
v(2),
new Value(UInt384.MAX_VALUE),
Arguments
.of(
UInt384.MAX_VALUE,
v(1),
hv(
"0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFE"))));
}
@ParameterizedTest
@MethodSource("subtractLongProvider")
void subtractLong(Value v1, long v2, Value expected) {
assertValueEquals(expected, v1.subtract(v2));
}
@SuppressWarnings("UnusedMethod")
private static Stream<Arguments> subtractLongProvider() {
return Stream
.of(
Arguments.of(v(1), 0L, v(1)),
Arguments.of(v(5), 0L, v(5)),
Arguments.of(v(2), 1L, v(1)),
Arguments.of(v(100), 100L, v(0)),
Arguments.of(biv("13492324908428420834234908342"), 10L, biv("13492324908428420834234908332")),
Arguments.of(biv("13492324908428420834234908342"), 23422141424214L, biv("13492324908428397412093484128")),
Arguments.of(v(0), 1L, new Value(UInt384.MAX_VALUE)),
Arguments.of(v(1), 2L, new Value(UInt384.MAX_VALUE)),
Arguments
.of(
new Value(UInt384.MAX_VALUE),
1L,
hv(
"0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFE")),
Arguments.of(v(0), -1L, v(1)),
Arguments.of(v(0), -100L, v(100)),
Arguments.of(v(2), -2L, v(4)));
}
@ParameterizedTest
@MethodSource("multiplyProvider")
void multiply(Value v1, Value v2, Value expected) {
assertValueEquals(expected, v1.multiply(v2));
}
@SuppressWarnings("UnusedMethod")
private static Stream<Arguments> multiplyProvider() {
return Stream
.of(
Arguments.of(v(0), v(2), v(0)),
Arguments.of(v(1), v(2), v(2)),
Arguments.of(v(2), v(2), v(4)),
Arguments.of(v(3), v(2), v(6)),
Arguments.of(v(4), v(2), v(8)),
Arguments.of(v(10), v(18), v(180)),
Arguments.of(biv("13492324908428420834234908341"), v(2), biv("26984649816856841668469816682")),
Arguments.of(biv("13492324908428420834234908342"), v(2), biv("26984649816856841668469816684")),
Arguments.of(v(2), v(8), v(16)),
Arguments.of(v(7), v(8), v(56)),
Arguments.of(v(8), v(8), v(64)),
Arguments.of(v(17), v(8), v(136)),
Arguments.of(biv("13492324908428420834234908342"), v(8), biv("107938599267427366673879266736")),
Arguments.of(biv("13492324908428420834234908342"), v(2048), biv("27632281412461405868513092284416")),
Arguments.of(biv("13492324908428420834234908342"), v(131072), biv("1768466010397529975584837906202624")),
Arguments.of(v(22), v(0), v(0)));
}
@ParameterizedTest
@MethodSource("multiplyLongProvider")
void multiplyLong(Value v1, long v2, Value expected) {
assertValueEquals(expected, v1.multiply(v2));
}
@SuppressWarnings("UnusedMethod")
private static Stream<Arguments> multiplyLongProvider() {
return Stream
.of(
Arguments.of(v(0), 2L, v(0)),
Arguments.of(v(1), 2L, v(2)),
Arguments.of(v(2), 2L, v(4)),
Arguments.of(v(3), 2L, v(6)),
Arguments.of(v(4), 2L, v(8)),
Arguments.of(v(10), 18L, v(180)),
Arguments.of(biv("13492324908428420834234908341"), 2L, biv("26984649816856841668469816682")),
Arguments.of(biv("13492324908428420834234908342"), 2L, biv("26984649816856841668469816684")),
Arguments.of(v(2), 8L, v(16)),
Arguments.of(v(7), 8L, v(56)),
Arguments.of(v(8), 8L, v(64)),
Arguments.of(v(17), 8L, v(136)),
Arguments.of(biv("13492324908428420834234908342"), 8L, biv("107938599267427366673879266736")),
Arguments.of(biv("13492324908428420834234908342"), 2048L, biv("27632281412461405868513092284416")),
Arguments.of(biv("13492324908428420834234908342"), 131072L, biv("1768466010397529975584837906202624")),
Arguments.of(v(22), 0L, v(0)),
Arguments
.of(
hv(
"0x0FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF"),
2L,
hv(
"0x1FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFE")),
Arguments
.of(
hv(
"0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF"),
2L,
hv(
"0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFE")));
}
@Test
void shouldThrowForMultiplyLongOfNegative() {
Throwable exception = assertThrows(ArithmeticException.class, () -> v(2).multiply(-5));
assertEquals("multiply unsigned by negative", exception.getMessage());
}
@ParameterizedTest
@MethodSource("multiplyModProvider")
void multiplyMod(Value v1, Value v2, UInt384 m, Value expected) {
assertValueEquals(expected, v1.multiplyMod(v2, m));
}
@SuppressWarnings("UnusedMethod")
private static Stream<Arguments> multiplyModProvider() {
return Stream
.of(
Arguments.of(v(0), v(5), UInt384.valueOf(2), v(0)),
Arguments.of(v(2), v(3), UInt384.valueOf(7), v(6)),
Arguments.of(v(2), v(3), UInt384.valueOf(6), v(0)),
Arguments.of(v(2), v(0), UInt384.valueOf(6), v(0)),
Arguments
.of(
hv(
"0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFE"),
v(2),
UInt384.MAX_VALUE,
hv(
"0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFD")));
}
@Test
void shouldThrowForMultiplyModOfModZero() {
Throwable exception = assertThrows(ArithmeticException.class, () -> v(0).multiplyMod(v(1), UInt384.ZERO));
assertEquals("multiplyMod with zero modulus", exception.getMessage());
}
@ParameterizedTest
@MethodSource("multiplyModLongUInt384Provider")
void multiplyModLongUInt384(Value v1, long v2, UInt384 m, Value expected) {
assertValueEquals(expected, v1.multiplyMod(v2, m));
}
@SuppressWarnings("UnusedMethod")
private static Stream<Arguments> multiplyModLongUInt384Provider() {
return Stream
.of(
Arguments.of(v(0), 5L, UInt384.valueOf(2), v(0)),
Arguments.of(v(2), 3L, UInt384.valueOf(7), v(6)),
Arguments.of(v(2), 3L, UInt384.valueOf(6), v(0)),
Arguments.of(v(2), 0L, UInt384.valueOf(6), v(0)),
Arguments
.of(
hv(
"0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFE"),
2L,
UInt384.MAX_VALUE,
hv(
"0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFD")));
}
@Test
void shouldThrowForMultiplyModLongUInt384OfModZero() {
Throwable exception = assertThrows(ArithmeticException.class, () -> v(5).multiplyMod(1L, UInt384.ZERO));
assertEquals("multiplyMod with zero modulus", exception.getMessage());
}
@Test
void shouldThrowForMultiplyModLongUInt384OfNegative() {
Throwable exception = assertThrows(ArithmeticException.class, () -> v(3).multiplyMod(-1, UInt384.valueOf(2)));
assertEquals("multiplyMod unsigned by negative", exception.getMessage());
}
@ParameterizedTest
@MethodSource("multiplyModLongLongProvider")
void multiplyModLongLong(Value v1, long v2, long m, Value expected) {
assertValueEquals(expected, v1.multiplyMod(v2, m));
}
@SuppressWarnings("UnusedMethod")
private static Stream<Arguments> multiplyModLongLongProvider() {
return Stream
.of(
Arguments.of(v(0), 5L, 2L, v(0)),
Arguments.of(v(2), 3L, 7L, v(6)),
Arguments.of(v(2), 3L, 6L, v(0)),
Arguments.of(v(2), 0L, 6L, v(0)),
Arguments
.of(
hv("0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFE"),
2L,
Long.MAX_VALUE,
hv("0x000000000000000000000000000000000000000000000000000000000000001C")));
}
@Test
void shouldThrowForMultiplyModLongLongOfModZero() {
Throwable exception = assertThrows(ArithmeticException.class, () -> v(5).multiplyMod(1, 0));
assertEquals("multiplyMod with zero modulus", exception.getMessage());
}
@Test
void shouldThrowForMultiplyModLongLongOfModNegative() {
Throwable exception = assertThrows(ArithmeticException.class, () -> v(2).multiplyMod(5, -7));
assertEquals("multiplyMod unsigned with negative modulus", exception.getMessage());
}
@Test
void shouldThrowForMultiplyModLongLongOfNegative() {
Throwable exception = assertThrows(ArithmeticException.class, () -> v(3).multiplyMod(-1, 2));
assertEquals("multiplyMod unsigned by negative", exception.getMessage());
}
@ParameterizedTest
@MethodSource("divideProvider")
void divide(Value v1, Value v2, Value expected) {
assertValueEquals(expected, v1.divide(v2));
}
@SuppressWarnings("UnusedMethod")
private static Stream<Arguments> divideProvider() {
return Stream
.of(
Arguments.of(v(0), v(2), v(0)),
Arguments.of(v(1), v(2), v(0)),
Arguments.of(v(2), v(2), v(1)),
Arguments.of(v(3), v(2), v(1)),
Arguments.of(v(4), v(2), v(2)),
Arguments.of(biv("13492324908428420834234908341"), v(2), biv("6746162454214210417117454170")),
Arguments.of(biv("13492324908428420834234908342"), v(2), biv("6746162454214210417117454171")),
Arguments.of(biv("13492324908428420834234908343"), v(2), biv("6746162454214210417117454171")),
Arguments.of(v(2), v(8), v(0)),
Arguments.of(v(7), v(8), v(0)),
Arguments.of(v(8), v(8), v(1)),
Arguments.of(v(9), v(8), v(1)),
Arguments.of(v(17), v(8), v(2)),
Arguments.of(v(1024), v(8), v(128)),
Arguments.of(v(1026), v(8), v(128)),
Arguments.of(biv("13492324908428420834234908342"), v(8), biv("1686540613553552604279363542")),
Arguments.of(biv("13492324908428420834234908342"), v(2048), biv("6588049271693564860466263")),
Arguments.of(biv("13492324908428420834234908342"), v(131072), biv("102938269870211950944785")));
}
@Test
void shouldThrowForDivideByZero() {
Throwable exception = assertThrows(ArithmeticException.class, () -> v(5).divide(v(0)));
assertEquals("divide by zero", exception.getMessage());
}
@ParameterizedTest
@MethodSource("divideLongProvider")
void divideLong(Value v1, long v2, Value expected) {
assertValueEquals(expected, v1.divide(v2));
}
@SuppressWarnings("UnusedMethod")
private static Stream<Arguments> divideLongProvider() {
return Stream
.of(
Arguments.of(v(0), 2L, v(0)),
Arguments.of(v(1), 2L, v(0)),
Arguments.of(v(2), 2L, v(1)),
Arguments.of(v(3), 2L, v(1)),
Arguments.of(v(4), 2L, v(2)),
Arguments.of(biv("13492324908428420834234908341"), 2L, biv("6746162454214210417117454170")),
Arguments.of(biv("13492324908428420834234908342"), 2L, biv("6746162454214210417117454171")),
Arguments.of(biv("13492324908428420834234908343"), 2L, biv("6746162454214210417117454171")),
Arguments.of(v(2), 8L, v(0)),
Arguments.of(v(7), 8L, v(0)),
Arguments.of(v(8), 8L, v(1)),
Arguments.of(v(9), 8L, v(1)),
Arguments.of(v(17), 8L, v(2)),
Arguments.of(v(1024), 8L, v(128)),
Arguments.of(v(1026), 8L, v(128)),
Arguments.of(biv("13492324908428420834234908342"), 8L, biv("1686540613553552604279363542")),
Arguments.of(biv("13492324908428420834234908342"), 2048L, biv("6588049271693564860466263")),
Arguments.of(biv("13492324908428420834234908342"), 131072L, biv("102938269870211950944785")));
}
@Test
void shouldThrowForDivideLongByZero() {
Throwable exception = assertThrows(ArithmeticException.class, () -> v(5).divide(0));
assertEquals("divide by zero", exception.getMessage());
}
@Test
void shouldThrowForDivideLongByNegative() {
Throwable exception = assertThrows(ArithmeticException.class, () -> v(5).divide(-5));
assertEquals("divide unsigned by negative", exception.getMessage());
}
@ParameterizedTest
@MethodSource("powUInt384Provider")
void powUInt384(Value v1, UInt384 v2, Value expected) {
assertValueEquals(expected, v1.pow(v2));
}
@SuppressWarnings("UnusedMethod")
private static Stream<Arguments> powUInt384Provider() {
return Stream
.of(
Arguments.of(v(0), UInt384.valueOf(2), v(0)),
Arguments.of(v(2), UInt384.valueOf(2), v(4)),
Arguments.of(v(2), UInt384.valueOf(8), v(256)),
Arguments.of(v(3), UInt384.valueOf(3), v(27)),
Arguments
.of(
hv(
"0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF0F0F0"),
UInt384.valueOf(3),
hv(
"0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF2A920E119A2F000")));
}
@ParameterizedTest
@MethodSource("powLongProvider")
void powLong(Value v1, long v2, Value expected) {
assertValueEquals(expected, v1.pow(v2));
}
@SuppressWarnings("UnusedMethod")
private static Stream<Arguments> powLongProvider() {
return Stream
.of(
Arguments.of(v(0), 2L, v(0)),
Arguments.of(v(2), 2L, v(4)),
Arguments.of(v(2), 8L, v(256)),
Arguments.of(v(3), 3L, v(27)),
Arguments
.of(
hv(
"0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF0F0F0"),
3L,
hv(
"0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF2A920E119A2F000")),
Arguments
.of(
v(3),
-3L,
hv(
"0x4BDA12F684BDA12F684BDA12F684BDA12F684BDA12F684BDA12F684BDA12F684BDA12F684BDA12F684BDA12F684BDA13")));
}
@ParameterizedTest
@MethodSource("modLongProvider")
void modLong(Value v1, long v2, Value expected) {
assertValueEquals(expected, v1.mod(v2));
}
@SuppressWarnings("UnusedMethod")
private static Stream<Arguments> modLongProvider() {
return Stream
.of(
Arguments.of(v(0), 2L, v(0)),
Arguments.of(v(1), 2L, v(1)),
Arguments.of(v(2), 2L, v(0)),
Arguments.of(v(3), 2L, v(1)),
Arguments.of(biv("13492324908428420834234908342"), 2L, v(0)),
Arguments.of(biv("13492324908428420834234908343"), 2L, v(1)),
Arguments.of(v(0), 8L, v(0)),
Arguments.of(v(1), 8L, v(1)),
Arguments.of(v(2), 8L, v(2)),
Arguments.of(v(3), 8L, v(3)),
Arguments.of(v(7), 8L, v(7)),
Arguments.of(v(8), 8L, v(0)),
Arguments.of(v(9), 8L, v(1)),
Arguments.of(v(1024), 8L, v(0)),
Arguments.of(v(1026), 8L, v(2)),
Arguments.of(biv("13492324908428420834234908342"), 8L, v(6)),
Arguments.of(biv("13492324908428420834234908343"), 8L, v(7)),
Arguments.of(biv("13492324908428420834234908344"), 8L, v(0)));
}
@Test
void shouldThrowForModLongByZero() {
Throwable exception = assertThrows(ArithmeticException.class, () -> v(5).mod(0));
assertEquals("mod by zero", exception.getMessage());
}
@Test
void shouldThrowForModLongByNegative() {
Throwable exception = assertThrows(ArithmeticException.class, () -> v(5).mod(-5));
assertEquals("mod by negative", exception.getMessage());
}
@ParameterizedTest
@MethodSource("intValueProvider")
void intValue(Value value, int expected) {
assertEquals(expected, value.intValue());
}
@SuppressWarnings("UnusedMethod")
private static Stream<Arguments> intValueProvider() {
return Stream
.of(
Arguments.of(hv("0x"), 0),
Arguments.of(hv("0x00"), 0),
Arguments.of(hv("0x00000000"), 0),
Arguments.of(hv("0x01"), 1),
Arguments.of(hv("0x0001"), 1),
Arguments.of(hv("0x000001"), 1),
Arguments.of(hv("0x00000001"), 1),
Arguments.of(hv("0x0100"), 256),
Arguments.of(hv("0x000100"), 256),
Arguments.of(hv("0x00000100"), 256));
}
@Test
void shouldThrowForIntValueOfOversizeValue() {
Throwable exception = assertThrows(ArithmeticException.class, () -> hv("0x0100000000").intValue());
assertEquals("Value does not fit a 4 byte int", exception.getMessage());
}
@ParameterizedTest
@MethodSource("longValueProvider")
void longValue(Value value, long expected) {
assertEquals(expected, value.toLong());
}
@SuppressWarnings("UnusedMethod")
private static Stream<Arguments> longValueProvider() {
return Stream
.of(
Arguments.of(hv("0x"), 0L),
Arguments.of(hv("0x00"), 0L),
Arguments.of(hv("0x00000000"), 0L),
Arguments.of(hv("0x01"), 1L),
Arguments.of(hv("0x0001"), 1L),
Arguments.of(hv("0x000001"), 1L),
Arguments.of(hv("0x00000001"), 1L),
Arguments.of(hv("0x0000000001"), 1L),
Arguments.of(hv("0x000000000001"), 1L),
Arguments.of(hv("0x0100"), 256L),
Arguments.of(hv("0x000100"), 256L),
Arguments.of(hv("0x00000100"), 256L),
Arguments.of(hv("0x00000100"), 256L),
Arguments.of(hv("0x000000000100"), 256L),
Arguments.of(hv("0x00000000000100"), 256L),
Arguments.of(hv("0x0000000000000100"), 256L),
Arguments.of(hv("0xFFFFFFFF"), (1L << 32) - 1));
}
@Test
void shouldThrowForLongValueOfOversizeValue() {
Throwable exception = assertThrows(ArithmeticException.class, () -> hv("0x010000000000000000").toLong());
assertEquals("Value does not fit a 8 byte long", exception.getMessage());
}
@ParameterizedTest
@MethodSource("compareToProvider")
void compareTo(Value v1, Value v2, int expected) {
assertEquals(expected, v1.compareTo(v2));
}
@SuppressWarnings("UnusedMethod")
private static Stream<Arguments> compareToProvider() {
return Stream
.of(
Arguments.of(v(5), v(5), 0),
Arguments.of(v(5), v(3), 1),
Arguments.of(v(5), v(6), -1),
Arguments
.of(
hv("0x0000000000000000000000000000000000000000000000000000000000000000"),
hv("0x0000000000000000000000000000000000000000000000000000000000000000"),
0),
Arguments
.of(
hv("0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF"),
hv("0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF"),
0),
Arguments
.of(
hv("0x000000000000000000000000000000FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF"),
hv("0x000000000000000000000000000000FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF"),
0),
Arguments
.of(
hv("0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF"),
hv("0x0000000000000000000000000000000000000000000000000000000000000000"),
1),
Arguments
.of(
hv("0x0000000000000000000000000000000000000000000000000000000000000000"),
hv("0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF"),
-1),
Arguments
.of(
hv("0x000000000000000000000000000001FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF"),
hv("0x000000000000000000000000000000FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF"),
1),
Arguments
.of(
hv("0x000000000000000000000000000000FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFE"),
hv("0x000000000000000000000000000000FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF"),
-1));
}
@ParameterizedTest
@MethodSource("toBytesProvider")
void toBytesTest(Value value, Bytes expected) {
assertEquals(expected, value.toBytes());
}
@SuppressWarnings("UnusedMethod")
private static Stream<Arguments> toBytesProvider() {
return Stream
.of(
Arguments
.of(
hv("0x00"),
Bytes
.fromHexString(
"000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000")),
Arguments
.of(
hv("0x01000000"),
Bytes
.fromHexString(
"0x000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001000000")),
Arguments
.of(
hv("0x0100000000"),
Bytes
.fromHexString(
"0x000000000000000000000000000000000000000000000000000000000000000000000000000000000000000100000000")),
Arguments
.of(
hv("0xf100000000ab"),
Bytes
.fromHexString(
"0x000000000000000000000000000000000000000000000000000000000000000000000000000000000000f100000000ab")),
Arguments
.of(
hv("0x0400000000000000000000000000000000000000000000000000f100000000ab"),
Bytes
.fromHexString(
"0x000000000000000000000000000000000400000000000000000000000000000000000000000000000000f100000000ab")));
}
@ParameterizedTest
@MethodSource("toMinimalBytesProvider")
void toMinimalBytesTest(Value value, Bytes expected) {
assertEquals(expected, value.toMinimalBytes());
}
@SuppressWarnings("UnusedMethod")
private static Stream<Arguments> toMinimalBytesProvider() {
return Stream
.of(
Arguments.of(hv("0x00"), Bytes.EMPTY),
Arguments.of(hv("0x01000000"), Bytes.fromHexString("0x01000000")),
Arguments.of(hv("0x0100000000"), Bytes.fromHexString("0x0100000000")),
Arguments.of(hv("0xf100000000ab"), Bytes.fromHexString("0xf100000000ab")),
Arguments
.of(
hv("0x0400000000000000000000000000000000000000000000000000f100000000ab"),
Bytes.fromHexString("0x0400000000000000000000000000000000000000000000000000f100000000ab")));
}
@ParameterizedTest
@MethodSource("numberOfLeadingZerosProvider")
void numberOfLeadingZeros(Value value, int expected) {
assertEquals(expected, value.numberOfLeadingZeros());
}
@SuppressWarnings("UnusedMethod")
private static Stream<Arguments> numberOfLeadingZerosProvider() {
return Stream
.of(
Arguments.of(hv("0x00"), 384),
Arguments.of(hv("0x01"), 383),
Arguments.of(hv("0x02"), 382),
Arguments.of(hv("0x03"), 382),
Arguments.of(hv("0x0F"), 380),
Arguments.of(hv("0x8F"), 376),
Arguments.of(hv("0x100000000"), 351));
}
@ParameterizedTest
@MethodSource("bitLengthProvider")
void bitLength(Value value, int expected) {
assertEquals(expected, value.bitLength());
}
@SuppressWarnings("UnusedMethod")
private static Stream<Arguments> bitLengthProvider() {
return Stream
.of(
Arguments.of(hv("0x00"), 0),
Arguments.of(hv("0x01"), 1),
Arguments.of(hv("0x02"), 2),
Arguments.of(hv("0x03"), 2),
Arguments.of(hv("0x0F"), 4),
Arguments.of(hv("0x8F"), 8),
Arguments.of(hv("0x100000000"), 33));
}
@ParameterizedTest
@MethodSource("addExactProvider")
void addExact(Value value, Value operand) {
assertThrows(ArithmeticException.class, () -> value.addExact(operand));
}
@SuppressWarnings("UnusedMethod")
private static Stream<Arguments> addExactProvider() {
return Stream.of(Arguments.of(Value.MAX_VALUE, v(1)), Arguments.of(Value.MAX_VALUE, Value.MAX_VALUE));
}
@ParameterizedTest
@MethodSource("addExactLongProvider")
void addExactLong(Value value, long operand) {
assertThrows(ArithmeticException.class, () -> value.addExact(operand));
}
@SuppressWarnings("UnusedMethod")
private static Stream<Arguments> addExactLongProvider() {
return Stream
.of(Arguments.of(Value.MAX_VALUE, 3), Arguments.of(Value.MAX_VALUE, Long.MAX_VALUE), Arguments.of(v(0), -1));
}
@ParameterizedTest
@MethodSource("subtractExactProvider")
void subtractExact(Value value, Value operand) {
assertThrows(ArithmeticException.class, () -> value.subtractExact(operand));
}
@SuppressWarnings("UnusedMethod")
private static Stream<Arguments> subtractExactProvider() {
return Stream.of(Arguments.of(v(0), v(1)), Arguments.of(v(0), Value.MAX_VALUE));
}
@ParameterizedTest
@MethodSource("subtractExactLongProvider")
void subtractExactLong(Value value, long operand) {
assertThrows(ArithmeticException.class, () -> value.subtractExact(operand));
}
@SuppressWarnings("UnusedMethod")
private static Stream<Arguments> subtractExactLongProvider() {
return Stream.of(Arguments.of(v(0), 1), Arguments.of(v(0), Long.MAX_VALUE), Arguments.of(Value.MAX_VALUE, -1));
}
private void assertValueEquals(Value expected, Value actual) {
String msg = String.format("Expected %s but got %s", expected.toHexString(), actual.toHexString());
assertEquals(expected, actual, msg);
}
}
|
googleapis/google-cloud-java | 37,703 | java-valkey/proto-google-cloud-valkey-v1/src/main/java/com/google/cloud/memorystore/v1/BackupInstanceRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/memorystore/v1/memorystore.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.memorystore.v1;
/**
*
*
* <pre>
* Request for [BackupInstance].
* </pre>
*
* Protobuf type {@code google.cloud.memorystore.v1.BackupInstanceRequest}
*/
public final class BackupInstanceRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.memorystore.v1.BackupInstanceRequest)
BackupInstanceRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use BackupInstanceRequest.newBuilder() to construct.
  // Invoked by Builder.build(); the superclass copies unknown fields from the builder.
  private BackupInstanceRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // Initializes string fields to empty so their getters never return null.
  private BackupInstanceRequest() {
    name_ = "";
    backupId_ = "";
  }
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    // Creates an empty message instance; called reflectively by the protobuf runtime.
    return new BackupInstanceRequest();
  }
  // Returns the message-type descriptor registered in the generated file descriptor.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.memorystore.v1.V1mainProto
        .internal_static_google_cloud_memorystore_v1_BackupInstanceRequest_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    // Maps proto field numbers to the generated Java accessors for reflection-based access.
    return com.google.cloud.memorystore.v1.V1mainProto
        .internal_static_google_cloud_memorystore_v1_BackupInstanceRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.memorystore.v1.BackupInstanceRequest.class,
            com.google.cloud.memorystore.v1.BackupInstanceRequest.Builder.class);
  }
private int bitField0_;
public static final int NAME_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object name_ = "";
/**
*
*
* <pre>
* Required. Instance resource name using the form:
* `projects/{project_id}/locations/{location_id}/instances/{instance_id}`
* where `location_id` refers to a Google Cloud region.
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The name.
*/
  @java.lang.Override
  public java.lang.String getName() {
    java.lang.Object ref = name_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Field still holds the wire-format ByteString: decode once and cache the String.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      name_ = s;
      return s;
    }
  }
/**
*
*
* <pre>
* Required. Instance resource name using the form:
* `projects/{project_id}/locations/{location_id}/instances/{instance_id}`
* where `location_id` refers to a Google Cloud region.
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for name.
*/
  @java.lang.Override
  public com.google.protobuf.ByteString getNameBytes() {
    java.lang.Object ref = name_;
    if (ref instanceof java.lang.String) {
      // Field holds the decoded String: encode once to UTF-8 and cache the ByteString.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      name_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int TTL_FIELD_NUMBER = 2;

  // null until explicitly set; presence is tracked via bitField0_ bit 0x1.
  private com.google.protobuf.Duration ttl_;
  /**
   *
   *
   * <pre>
   * Optional. TTL for the backup to expire. Value range is 1 day to 100 years.
   * If not specified, the default value is 100 years.
   * </pre>
   *
   * <code>.google.protobuf.Duration ttl = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return Whether the ttl field is set.
   */
  @java.lang.Override
  public boolean hasTtl() {
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   *
   *
   * <pre>
   * Optional. TTL for the backup to expire. Value range is 1 day to 100 years.
   * If not specified, the default value is 100 years.
   * </pre>
   *
   * <code>.google.protobuf.Duration ttl = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The ttl. Never null: falls back to the Duration default instance.
   */
  @java.lang.Override
  public com.google.protobuf.Duration getTtl() {
    return ttl_ == null ? com.google.protobuf.Duration.getDefaultInstance() : ttl_;
  }
  /**
   *
   *
   * <pre>
   * Optional. TTL for the backup to expire. Value range is 1 day to 100 years.
   * If not specified, the default value is 100 years.
   * </pre>
   *
   * <code>.google.protobuf.Duration ttl = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
   */
  @java.lang.Override
  public com.google.protobuf.DurationOrBuilder getTtlOrBuilder() {
    return ttl_ == null ? com.google.protobuf.Duration.getDefaultInstance() : ttl_;
  }
  public static final int BACKUP_ID_FIELD_NUMBER = 3;

  // Declared `optional` in the proto, so presence is tracked (bitField0_ bit
  // 0x2) even though it is a string. Same lazy String/ByteString cache as name_.
  @SuppressWarnings("serial")
  private volatile java.lang.Object backupId_ = "";
  /**
   *
   *
   * <pre>
   * Optional. The id of the backup to be created. If not specified, the
   * default value ([YYYYMMDDHHMMSS]_[Shortened Instance UID] is used.
   * </pre>
   *
   * <code>optional string backup_id = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return Whether the backupId field is set.
   */
  @java.lang.Override
  public boolean hasBackupId() {
    return ((bitField0_ & 0x00000002) != 0);
  }
  /**
   *
   *
   * <pre>
   * Optional. The id of the backup to be created. If not specified, the
   * default value ([YYYYMMDDHHMMSS]_[Shortened Instance UID] is used.
   * </pre>
   *
   * <code>optional string backup_id = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The backupId.
   */
  @java.lang.Override
  public java.lang.String getBackupId() {
    java.lang.Object ref = backupId_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Decode once, cache the String form.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      backupId_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Optional. The id of the backup to be created. If not specified, the
   * default value ([YYYYMMDDHHMMSS]_[Shortened Instance UID] is used.
   * </pre>
   *
   * <code>optional string backup_id = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The bytes for backupId.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getBackupIdBytes() {
    java.lang.Object ref = backupId_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      backupId_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // -1 = not computed, 0 = not initialized, 1 = initialized (memoized result).
  private byte memoizedIsInitialized = -1;

  // No required proto2-style fields in this message, so initialization always
  // succeeds; the result is cached after the first call.
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes set fields in field-number order. Note the asymmetry: `name`
  // (plain proto3 string) is written only when non-empty, while `ttl` and
  // `backup_id` use explicit presence bits from bitField0_.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(2, getTtl());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, backupId_);
    }
    getUnknownFields().writeTo(output);
  }
  // Computes (and memoizes in memoizedSize, inherited from the base class) the
  // byte size of the wire encoding; must mirror writeTo()'s field conditions.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getTtl());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, backupId_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Field-wise equality: presence must match before values are compared for
  // the optional fields; unknown fields participate as well.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.memorystore.v1.BackupInstanceRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.memorystore.v1.BackupInstanceRequest other =
        (com.google.cloud.memorystore.v1.BackupInstanceRequest) obj;

    if (!getName().equals(other.getName())) return false;
    if (hasTtl() != other.hasTtl()) return false;
    if (hasTtl()) {
      if (!getTtl().equals(other.getTtl())) return false;
    }
    if (hasBackupId() != other.hasBackupId()) return false;
    if (hasBackupId()) {
      if (!getBackupId().equals(other.getBackupId())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Hash over the same fields equals() inspects (set fields only, plus the
  // descriptor and unknown fields); memoized in memoizedHashCode.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + NAME_FIELD_NUMBER;
    hash = (53 * hash) + getName().hashCode();
    if (hasTtl()) {
      hash = (37 * hash) + TTL_FIELD_NUMBER;
      hash = (53 * hash) + getTtl().hashCode();
    }
    if (hasBackupId()) {
      hash = (37 * hash) + BACKUP_ID_FIELD_NUMBER;
      hash = (53 * hash) + getBackupId().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points: one overload per input source
  // (ByteBuffer, ByteString, byte[], InputStream, CodedInputStream), each with
  // and without an ExtensionRegistry; all delegate to PARSER.
  public static com.google.cloud.memorystore.v1.BackupInstanceRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.memorystore.v1.BackupInstanceRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.memorystore.v1.BackupInstanceRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.memorystore.v1.BackupInstanceRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.memorystore.v1.BackupInstanceRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.memorystore.v1.BackupInstanceRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.memorystore.v1.BackupInstanceRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.memorystore.v1.BackupInstanceRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  // Delimited variants read a varint length prefix before the message bytes.
  public static com.google.cloud.memorystore.v1.BackupInstanceRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.memorystore.v1.BackupInstanceRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.memorystore.v1.BackupInstanceRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.memorystore.v1.BackupInstanceRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Builder factory methods. toBuilder() avoids copying when called on the
  // shared default instance.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.cloud.memorystore.v1.BackupInstanceRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * Request for [BackupInstance].
   * </pre>
   *
   * Protobuf type {@code google.cloud.memorystore.v1.BackupInstanceRequest}
   *
   * <p>Mutable builder for {@code BackupInstanceRequest}. Note the builder's
   * {@code bitField0_} layout (0x1 = name, 0x2 = ttl, 0x4 = backup_id) differs
   * from the built message's (0x1 = ttl, 0x2 = backup_id); buildPartial0()
   * translates between the two.
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.memorystore.v1.BackupInstanceRequest)
      com.google.cloud.memorystore.v1.BackupInstanceRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.memorystore.v1.V1mainProto
          .internal_static_google_cloud_memorystore_v1_BackupInstanceRequest_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.memorystore.v1.V1mainProto
          .internal_static_google_cloud_memorystore_v1_BackupInstanceRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.memorystore.v1.BackupInstanceRequest.class,
              com.google.cloud.memorystore.v1.BackupInstanceRequest.Builder.class);
    }

    // Construct using com.google.cloud.memorystore.v1.BackupInstanceRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    // Eagerly creates nested-message field builders when the runtime requires
    // it (alwaysUseFieldBuilders is true only in debug/nested-builder mode).
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getTtlFieldBuilder();
      }
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      name_ = "";
      ttl_ = null;
      if (ttlBuilder_ != null) {
        ttlBuilder_.dispose();
        ttlBuilder_ = null;
      }
      backupId_ = "";
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.memorystore.v1.V1mainProto
          .internal_static_google_cloud_memorystore_v1_BackupInstanceRequest_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.memorystore.v1.BackupInstanceRequest getDefaultInstanceForType() {
      return com.google.cloud.memorystore.v1.BackupInstanceRequest.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.memorystore.v1.BackupInstanceRequest build() {
      com.google.cloud.memorystore.v1.BackupInstanceRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.memorystore.v1.BackupInstanceRequest buildPartial() {
      com.google.cloud.memorystore.v1.BackupInstanceRequest result =
          new com.google.cloud.memorystore.v1.BackupInstanceRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies set fields into the result, remapping builder presence bits
    // (0x1/0x2/0x4) onto the message's presence bits (ttl -> 0x1,
    // backup_id -> 0x2; name has no message-side presence bit).
    private void buildPartial0(com.google.cloud.memorystore.v1.BackupInstanceRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.name_ = name_;
      }
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.ttl_ = ttlBuilder_ == null ? ttl_ : ttlBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.backupId_ = backupId_;
        to_bitField0_ |= 0x00000002;
      }
      result.bitField0_ |= to_bitField0_;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.memorystore.v1.BackupInstanceRequest) {
        return mergeFrom((com.google.cloud.memorystore.v1.BackupInstanceRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Type-specific merge: only fields set on `other` overwrite this builder.
    public Builder mergeFrom(com.google.cloud.memorystore.v1.BackupInstanceRequest other) {
      if (other == com.google.cloud.memorystore.v1.BackupInstanceRequest.getDefaultInstance())
        return this;
      if (!other.getName().isEmpty()) {
        name_ = other.name_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (other.hasTtl()) {
        mergeTtl(other.getTtl());
      }
      if (other.hasBackupId()) {
        backupId_ = other.backupId_;
        bitField0_ |= 0x00000004;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    // Streaming merge: dispatches on wire tags (10 = field 1 string,
    // 18 = field 2 length-delimited message, 26 = field 3 string).
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                name_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                input.readMessage(getTtlFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            case 26:
              {
                backupId_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    private int bitField0_;

    private java.lang.Object name_ = "";
    /**
     *
     *
     * <pre>
     * Required. Instance resource name using the form:
     * `projects/{project_id}/locations/{location_id}/instances/{instance_id}`
     * where `location_id` refers to a Google Cloud region.
     * </pre>
     *
     * <code>
     * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The name.
     */
    public java.lang.String getName() {
      java.lang.Object ref = name_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        name_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. Instance resource name using the form:
     * `projects/{project_id}/locations/{location_id}/instances/{instance_id}`
     * where `location_id` refers to a Google Cloud region.
     * </pre>
     *
     * <code>
     * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The bytes for name.
     */
    public com.google.protobuf.ByteString getNameBytes() {
      java.lang.Object ref = name_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        name_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. Instance resource name using the form:
     * `projects/{project_id}/locations/{location_id}/instances/{instance_id}`
     * where `location_id` refers to a Google Cloud region.
     * </pre>
     *
     * <code>
     * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The name to set.
     * @return This builder for chaining.
     */
    public Builder setName(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      name_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Instance resource name using the form:
     * `projects/{project_id}/locations/{location_id}/instances/{instance_id}`
     * where `location_id` refers to a Google Cloud region.
     * </pre>
     *
     * <code>
     * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearName() {
      name_ = getDefaultInstance().getName();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Instance resource name using the form:
     * `projects/{project_id}/locations/{location_id}/instances/{instance_id}`
     * where `location_id` refers to a Google Cloud region.
     * </pre>
     *
     * <code>
     * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The bytes for name to set.
     * @return This builder for chaining.
     */
    public Builder setNameBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      name_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    private com.google.protobuf.Duration ttl_;
    // Lazily created nested-message builder; once non-null it owns ttl state
    // and ttl_ is ignored (see getTtl()/getTtlFieldBuilder()).
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.Duration,
            com.google.protobuf.Duration.Builder,
            com.google.protobuf.DurationOrBuilder>
        ttlBuilder_;
    /**
     *
     *
     * <pre>
     * Optional. TTL for the backup to expire. Value range is 1 day to 100 years.
     * If not specified, the default value is 100 years.
     * </pre>
     *
     * <code>.google.protobuf.Duration ttl = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return Whether the ttl field is set.
     */
    public boolean hasTtl() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     *
     *
     * <pre>
     * Optional. TTL for the backup to expire. Value range is 1 day to 100 years.
     * If not specified, the default value is 100 years.
     * </pre>
     *
     * <code>.google.protobuf.Duration ttl = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The ttl.
     */
    public com.google.protobuf.Duration getTtl() {
      if (ttlBuilder_ == null) {
        return ttl_ == null ? com.google.protobuf.Duration.getDefaultInstance() : ttl_;
      } else {
        return ttlBuilder_.getMessage();
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. TTL for the backup to expire. Value range is 1 day to 100 years.
     * If not specified, the default value is 100 years.
     * </pre>
     *
     * <code>.google.protobuf.Duration ttl = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     */
    public Builder setTtl(com.google.protobuf.Duration value) {
      if (ttlBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ttl_ = value;
      } else {
        ttlBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. TTL for the backup to expire. Value range is 1 day to 100 years.
     * If not specified, the default value is 100 years.
     * </pre>
     *
     * <code>.google.protobuf.Duration ttl = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     */
    public Builder setTtl(com.google.protobuf.Duration.Builder builderForValue) {
      if (ttlBuilder_ == null) {
        ttl_ = builderForValue.build();
      } else {
        ttlBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. TTL for the backup to expire. Value range is 1 day to 100 years.
     * If not specified, the default value is 100 years.
     * </pre>
     *
     * <code>.google.protobuf.Duration ttl = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     */
    public Builder mergeTtl(com.google.protobuf.Duration value) {
      if (ttlBuilder_ == null) {
        // Merge into the existing value only if one was explicitly set and is
        // not the shared default instance; otherwise just take `value`.
        if (((bitField0_ & 0x00000002) != 0)
            && ttl_ != null
            && ttl_ != com.google.protobuf.Duration.getDefaultInstance()) {
          getTtlBuilder().mergeFrom(value);
        } else {
          ttl_ = value;
        }
      } else {
        ttlBuilder_.mergeFrom(value);
      }
      // Presence bit is set only when a value actually landed in ttl_.
      if (ttl_ != null) {
        bitField0_ |= 0x00000002;
        onChanged();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. TTL for the backup to expire. Value range is 1 day to 100 years.
     * If not specified, the default value is 100 years.
     * </pre>
     *
     * <code>.google.protobuf.Duration ttl = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     */
    public Builder clearTtl() {
      bitField0_ = (bitField0_ & ~0x00000002);
      ttl_ = null;
      if (ttlBuilder_ != null) {
        ttlBuilder_.dispose();
        ttlBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. TTL for the backup to expire. Value range is 1 day to 100 years.
     * If not specified, the default value is 100 years.
     * </pre>
     *
     * <code>.google.protobuf.Duration ttl = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     */
    public com.google.protobuf.Duration.Builder getTtlBuilder() {
      bitField0_ |= 0x00000002;
      onChanged();
      return getTtlFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * Optional. TTL for the backup to expire. Value range is 1 day to 100 years.
     * If not specified, the default value is 100 years.
     * </pre>
     *
     * <code>.google.protobuf.Duration ttl = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     */
    public com.google.protobuf.DurationOrBuilder getTtlOrBuilder() {
      if (ttlBuilder_ != null) {
        return ttlBuilder_.getMessageOrBuilder();
      } else {
        return ttl_ == null ? com.google.protobuf.Duration.getDefaultInstance() : ttl_;
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. TTL for the backup to expire. Value range is 1 day to 100 years.
     * If not specified, the default value is 100 years.
     * </pre>
     *
     * <code>.google.protobuf.Duration ttl = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.Duration,
            com.google.protobuf.Duration.Builder,
            com.google.protobuf.DurationOrBuilder>
        getTtlFieldBuilder() {
      if (ttlBuilder_ == null) {
        ttlBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.protobuf.Duration,
                com.google.protobuf.Duration.Builder,
                com.google.protobuf.DurationOrBuilder>(getTtl(), getParentForChildren(), isClean());
        // Ownership of the value transfers to the field builder.
        ttl_ = null;
      }
      return ttlBuilder_;
    }

    private java.lang.Object backupId_ = "";
    /**
     *
     *
     * <pre>
     * Optional. The id of the backup to be created. If not specified, the
     * default value ([YYYYMMDDHHMMSS]_[Shortened Instance UID] is used.
     * </pre>
     *
     * <code>optional string backup_id = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return Whether the backupId field is set.
     */
    public boolean hasBackupId() {
      return ((bitField0_ & 0x00000004) != 0);
    }
    /**
     *
     *
     * <pre>
     * Optional. The id of the backup to be created. If not specified, the
     * default value ([YYYYMMDDHHMMSS]_[Shortened Instance UID] is used.
     * </pre>
     *
     * <code>optional string backup_id = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The backupId.
     */
    public java.lang.String getBackupId() {
      java.lang.Object ref = backupId_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        backupId_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. The id of the backup to be created. If not specified, the
     * default value ([YYYYMMDDHHMMSS]_[Shortened Instance UID] is used.
     * </pre>
     *
     * <code>optional string backup_id = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The bytes for backupId.
     */
    public com.google.protobuf.ByteString getBackupIdBytes() {
      java.lang.Object ref = backupId_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        backupId_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. The id of the backup to be created. If not specified, the
     * default value ([YYYYMMDDHHMMSS]_[Shortened Instance UID] is used.
     * </pre>
     *
     * <code>optional string backup_id = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The backupId to set.
     * @return This builder for chaining.
     */
    public Builder setBackupId(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      backupId_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. The id of the backup to be created. If not specified, the
     * default value ([YYYYMMDDHHMMSS]_[Shortened Instance UID] is used.
     * </pre>
     *
     * <code>optional string backup_id = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearBackupId() {
      backupId_ = getDefaultInstance().getBackupId();
      bitField0_ = (bitField0_ & ~0x00000004);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. The id of the backup to be created. If not specified, the
     * default value ([YYYYMMDDHHMMSS]_[Shortened Instance UID] is used.
     * </pre>
     *
     * <code>optional string backup_id = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The bytes for backupId to set.
     * @return This builder for chaining.
     */
    public Builder setBackupIdBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      backupId_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.memorystore.v1.BackupInstanceRequest)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.memorystore.v1.BackupInstanceRequest)
  // Shared immutable default instance; all empty messages of this type alias it.
  private static final com.google.cloud.memorystore.v1.BackupInstanceRequest DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.memorystore.v1.BackupInstanceRequest();
  }

  public static com.google.cloud.memorystore.v1.BackupInstanceRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Singleton parser backing all parseFrom() overloads. Parse failures attach
  // the partially-built message via setUnfinishedMessage so callers can inspect
  // what was decoded before the error.
  private static final com.google.protobuf.Parser<BackupInstanceRequest> PARSER =
      new com.google.protobuf.AbstractParser<BackupInstanceRequest>() {
        @java.lang.Override
        public BackupInstanceRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<BackupInstanceRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<BackupInstanceRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.memorystore.v1.BackupInstanceRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
google/j2objc | 36,699 | jre_emul/android/platform/libcore/ojluni/src/main/java/sun/security/util/Resources.java | /*
* Copyright (c) 2000, 2011, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package sun.security.util;
/**
* <p> This class represents the <code>ResourceBundle</code>
* for javax.security.auth and sun.security.
*
*/
public class Resources extends java.util.ListResourceBundle {
private static final Object[][] contents = {
// shared (from jarsigner)
{"SPACE", " "},
{"2SPACE", " "},
{"6SPACE", " "},
{"COMMA", ", "},
// shared (from keytool)
{"NEWLINE", "\n"},
{"STAR",
"*******************************************"},
{"STARNN",
"*******************************************\n\n"},
// keytool: Help part
{".OPTION.", " [OPTION]..."},
{"Options.", "Options:"},
{"Use.keytool.help.for.all.available.commands",
"Use \"keytool -help\" for all available commands"},
{"Key.and.Certificate.Management.Tool",
"Key and Certificate Management Tool"},
{"Commands.", "Commands:"},
{"Use.keytool.command.name.help.for.usage.of.command.name",
"Use \"keytool -command_name -help\" for usage of command_name"},
// keytool: help: commands
{"Generates.a.certificate.request",
"Generates a certificate request"}, //-certreq
{"Changes.an.entry.s.alias",
"Changes an entry's alias"}, //-changealias
{"Deletes.an.entry",
"Deletes an entry"}, //-delete
{"Exports.certificate",
"Exports certificate"}, //-exportcert
{"Generates.a.key.pair",
"Generates a key pair"}, //-genkeypair
{"Generates.a.secret.key",
"Generates a secret key"}, //-genseckey
{"Generates.certificate.from.a.certificate.request",
"Generates certificate from a certificate request"}, //-gencert
{"Generates.CRL", "Generates CRL"}, //-gencrl
{"Imports.entries.from.a.JDK.1.1.x.style.identity.database",
"Imports entries from a JDK 1.1.x-style identity database"}, //-identitydb
{"Imports.a.certificate.or.a.certificate.chain",
"Imports a certificate or a certificate chain"}, //-importcert
{"Imports.one.or.all.entries.from.another.keystore",
"Imports one or all entries from another keystore"}, //-importkeystore
{"Clones.a.key.entry",
"Clones a key entry"}, //-keyclone
{"Changes.the.key.password.of.an.entry",
"Changes the key password of an entry"}, //-keypasswd
{"Lists.entries.in.a.keystore",
"Lists entries in a keystore"}, //-list
{"Prints.the.content.of.a.certificate",
"Prints the content of a certificate"}, //-printcert
{"Prints.the.content.of.a.certificate.request",
"Prints the content of a certificate request"}, //-printcertreq
{"Prints.the.content.of.a.CRL.file",
"Prints the content of a CRL file"}, //-printcrl
{"Generates.a.self.signed.certificate",
"Generates a self-signed certificate"}, //-selfcert
{"Changes.the.store.password.of.a.keystore",
"Changes the store password of a keystore"}, //-storepasswd
// keytool: help: options
{"alias.name.of.the.entry.to.process",
"alias name of the entry to process"}, //-alias
{"destination.alias",
"destination alias"}, //-destalias
{"destination.key.password",
"destination key password"}, //-destkeypass
{"destination.keystore.name",
"destination keystore name"}, //-destkeystore
{"destination.keystore.password.protected",
"destination keystore password protected"}, //-destprotected
{"destination.keystore.provider.name",
"destination keystore provider name"}, //-destprovidername
{"destination.keystore.password",
"destination keystore password"}, //-deststorepass
{"destination.keystore.type",
"destination keystore type"}, //-deststoretype
{"distinguished.name",
"distinguished name"}, //-dname
{"X.509.extension",
"X.509 extension"}, //-ext
{"output.file.name",
"output file name"}, //-file and -outfile
{"input.file.name",
"input file name"}, //-file and -infile
{"key.algorithm.name",
"key algorithm name"}, //-keyalg
{"key.password",
"key password"}, //-keypass
{"key.bit.size",
"key bit size"}, //-keysize
{"keystore.name",
"keystore name"}, //-keystore
{"new.password",
"new password"}, //-new
{"do.not.prompt",
"do not prompt"}, //-noprompt
{"password.through.protected.mechanism",
"password through protected mechanism"}, //-protected
{"provider.argument",
"provider argument"}, //-providerarg
{"provider.class.name",
"provider class name"}, //-providerclass
{"provider.name",
"provider name"}, //-providername
{"provider.classpath",
"provider classpath"}, //-providerpath
{"output.in.RFC.style",
"output in RFC style"}, //-rfc
{"signature.algorithm.name",
"signature algorithm name"}, //-sigalg
{"source.alias",
"source alias"}, //-srcalias
{"source.key.password",
"source key password"}, //-srckeypass
{"source.keystore.name",
"source keystore name"}, //-srckeystore
{"source.keystore.password.protected",
"source keystore password protected"}, //-srcprotected
{"source.keystore.provider.name",
"source keystore provider name"}, //-srcprovidername
{"source.keystore.password",
"source keystore password"}, //-srcstorepass
{"source.keystore.type",
"source keystore type"}, //-srcstoretype
{"SSL.server.host.and.port",
"SSL server host and port"}, //-sslserver
{"signed.jar.file",
"signed jar file"}, //=jarfile
{"certificate.validity.start.date.time",
"certificate validity start date/time"}, //-startdate
{"keystore.password",
"keystore password"}, //-storepass
{"keystore.type",
"keystore type"}, //-storetype
{"trust.certificates.from.cacerts",
"trust certificates from cacerts"}, //-trustcacerts
{"verbose.output",
"verbose output"}, //-v
{"validity.number.of.days",
"validity number of days"}, //-validity
{"Serial.ID.of.cert.to.revoke",
"Serial ID of cert to revoke"}, //-id
// keytool: Running part
{"keytool.error.", "keytool error: "},
{"Illegal.option.", "Illegal option: "},
{"Illegal.value.", "Illegal value: "},
{"Unknown.password.type.", "Unknown password type: "},
{"Cannot.find.environment.variable.",
"Cannot find environment variable: "},
{"Cannot.find.file.", "Cannot find file: "},
{"Command.option.flag.needs.an.argument.", "Command option {0} needs an argument."},
{"Warning.Different.store.and.key.passwords.not.supported.for.PKCS12.KeyStores.Ignoring.user.specified.command.value.",
"Warning: Different store and key passwords not supported for PKCS12 KeyStores. Ignoring user-specified {0} value."},
{".keystore.must.be.NONE.if.storetype.is.{0}",
"-keystore must be NONE if -storetype is {0}"},
{"Too.many.retries.program.terminated",
"Too many retries, program terminated"},
{".storepasswd.and.keypasswd.commands.not.supported.if.storetype.is.{0}",
"-storepasswd and -keypasswd commands not supported if -storetype is {0}"},
{".keypasswd.commands.not.supported.if.storetype.is.PKCS12",
"-keypasswd commands not supported if -storetype is PKCS12"},
{".keypass.and.new.can.not.be.specified.if.storetype.is.{0}",
"-keypass and -new can not be specified if -storetype is {0}"},
{"if.protected.is.specified.then.storepass.keypass.and.new.must.not.be.specified",
"if -protected is specified, then -storepass, -keypass, and -new must not be specified"},
{"if.srcprotected.is.specified.then.srcstorepass.and.srckeypass.must.not.be.specified",
"if -srcprotected is specified, then -srcstorepass and -srckeypass must not be specified"},
{"if.keystore.is.not.password.protected.then.storepass.keypass.and.new.must.not.be.specified",
"if keystore is not password protected, then -storepass, -keypass, and -new must not be specified"},
{"if.source.keystore.is.not.password.protected.then.srcstorepass.and.srckeypass.must.not.be.specified",
"if source keystore is not password protected, then -srcstorepass and -srckeypass must not be specified"},
{"Illegal.startdate.value", "Illegal startdate value"},
{"Validity.must.be.greater.than.zero",
"Validity must be greater than zero"},
{"provName.not.a.provider", "{0} not a provider"},
{"Usage.error.no.command.provided", "Usage error: no command provided"},
{"Source.keystore.file.exists.but.is.empty.", "Source keystore file exists, but is empty: "},
{"Please.specify.srckeystore", "Please specify -srckeystore"},
{"Must.not.specify.both.v.and.rfc.with.list.command",
"Must not specify both -v and -rfc with 'list' command"},
{"Key.password.must.be.at.least.6.characters",
"Key password must be at least 6 characters"},
{"New.password.must.be.at.least.6.characters",
"New password must be at least 6 characters"},
{"Keystore.file.exists.but.is.empty.",
"Keystore file exists, but is empty: "},
{"Keystore.file.does.not.exist.",
"Keystore file does not exist: "},
{"Must.specify.destination.alias", "Must specify destination alias"},
{"Must.specify.alias", "Must specify alias"},
{"Keystore.password.must.be.at.least.6.characters",
"Keystore password must be at least 6 characters"},
{"Enter.keystore.password.", "Enter keystore password: "},
{"Enter.source.keystore.password.", "Enter source keystore password: "},
{"Enter.destination.keystore.password.", "Enter destination keystore password: "},
{"Keystore.password.is.too.short.must.be.at.least.6.characters",
"Keystore password is too short - must be at least 6 characters"},
{"Unknown.Entry.Type", "Unknown Entry Type"},
{"Too.many.failures.Alias.not.changed", "Too many failures. Alias not changed"},
{"Entry.for.alias.alias.successfully.imported.",
"Entry for alias {0} successfully imported."},
{"Entry.for.alias.alias.not.imported.", "Entry for alias {0} not imported."},
{"Problem.importing.entry.for.alias.alias.exception.Entry.for.alias.alias.not.imported.",
"Problem importing entry for alias {0}: {1}.\nEntry for alias {0} not imported."},
{"Import.command.completed.ok.entries.successfully.imported.fail.entries.failed.or.cancelled",
"Import command completed: {0} entries successfully imported, {1} entries failed or cancelled"},
{"Warning.Overwriting.existing.alias.alias.in.destination.keystore",
"Warning: Overwriting existing alias {0} in destination keystore"},
{"Existing.entry.alias.alias.exists.overwrite.no.",
"Existing entry alias {0} exists, overwrite? [no]: "},
{"Too.many.failures.try.later", "Too many failures - try later"},
{"Certification.request.stored.in.file.filename.",
"Certification request stored in file <{0}>"},
{"Submit.this.to.your.CA", "Submit this to your CA"},
{"if.alias.not.specified.destalias.srckeypass.and.destkeypass.must.not.be.specified",
"if alias not specified, destalias, srckeypass, and destkeypass must not be specified"},
{"Certificate.stored.in.file.filename.",
"Certificate stored in file <{0}>"},
{"Certificate.reply.was.installed.in.keystore",
"Certificate reply was installed in keystore"},
{"Certificate.reply.was.not.installed.in.keystore",
"Certificate reply was not installed in keystore"},
{"Certificate.was.added.to.keystore",
"Certificate was added to keystore"},
{"Certificate.was.not.added.to.keystore",
"Certificate was not added to keystore"},
{".Storing.ksfname.", "[Storing {0}]"},
{"alias.has.no.public.key.certificate.",
"{0} has no public key (certificate)"},
{"Cannot.derive.signature.algorithm",
"Cannot derive signature algorithm"},
{"Alias.alias.does.not.exist",
"Alias <{0}> does not exist"},
{"Alias.alias.has.no.certificate",
"Alias <{0}> has no certificate"},
{"Key.pair.not.generated.alias.alias.already.exists",
"Key pair not generated, alias <{0}> already exists"},
{"Generating.keysize.bit.keyAlgName.key.pair.and.self.signed.certificate.sigAlgName.with.a.validity.of.validality.days.for",
"Generating {0} bit {1} key pair and self-signed certificate ({2}) with a validity of {3} days\n\tfor: {4}"},
{"Enter.key.password.for.alias.", "Enter key password for <{0}>"},
{".RETURN.if.same.as.keystore.password.",
"\t(RETURN if same as keystore password): "},
{"Key.password.is.too.short.must.be.at.least.6.characters",
"Key password is too short - must be at least 6 characters"},
{"Too.many.failures.key.not.added.to.keystore",
"Too many failures - key not added to keystore"},
{"Destination.alias.dest.already.exists",
"Destination alias <{0}> already exists"},
{"Password.is.too.short.must.be.at.least.6.characters",
"Password is too short - must be at least 6 characters"},
{"Too.many.failures.Key.entry.not.cloned",
"Too many failures. Key entry not cloned"},
{"key.password.for.alias.", "key password for <{0}>"},
{"Keystore.entry.for.id.getName.already.exists",
"Keystore entry for <{0}> already exists"},
{"Creating.keystore.entry.for.id.getName.",
"Creating keystore entry for <{0}> ..."},
{"No.entries.from.identity.database.added",
"No entries from identity database added"},
{"Alias.name.alias", "Alias name: {0}"},
{"Creation.date.keyStore.getCreationDate.alias.",
"Creation date: {0,date}"},
{"alias.keyStore.getCreationDate.alias.",
"{0}, {1,date}, "},
{"alias.", "{0}, "},
{"Entry.type.type.", "Entry type: {0}"},
{"Certificate.chain.length.", "Certificate chain length: "},
{"Certificate.i.1.", "Certificate[{0,number,integer}]:"},
{"Certificate.fingerprint.SHA1.", "Certificate fingerprint (SHA1): "},
{"Keystore.type.", "Keystore type: "},
{"Keystore.provider.", "Keystore provider: "},
{"Your.keystore.contains.keyStore.size.entry",
"Your keystore contains {0,number,integer} entry"},
{"Your.keystore.contains.keyStore.size.entries",
"Your keystore contains {0,number,integer} entries"},
{"Failed.to.parse.input", "Failed to parse input"},
{"Empty.input", "Empty input"},
{"Not.X.509.certificate", "Not X.509 certificate"},
{"alias.has.no.public.key", "{0} has no public key"},
{"alias.has.no.X.509.certificate", "{0} has no X.509 certificate"},
{"New.certificate.self.signed.", "New certificate (self-signed):"},
{"Reply.has.no.certificates", "Reply has no certificates"},
{"Certificate.not.imported.alias.alias.already.exists",
"Certificate not imported, alias <{0}> already exists"},
{"Input.not.an.X.509.certificate", "Input not an X.509 certificate"},
{"Certificate.already.exists.in.keystore.under.alias.trustalias.",
"Certificate already exists in keystore under alias <{0}>"},
{"Do.you.still.want.to.add.it.no.",
"Do you still want to add it? [no]: "},
{"Certificate.already.exists.in.system.wide.CA.keystore.under.alias.trustalias.",
"Certificate already exists in system-wide CA keystore under alias <{0}>"},
{"Do.you.still.want.to.add.it.to.your.own.keystore.no.",
"Do you still want to add it to your own keystore? [no]: "},
{"Trust.this.certificate.no.", "Trust this certificate? [no]: "},
{"YES", "YES"},
{"New.prompt.", "New {0}: "},
{"Passwords.must.differ", "Passwords must differ"},
{"Re.enter.new.prompt.", "Re-enter new {0}: "},
{"Re.enter.new.password.", "Re-enter new password: "},
{"They.don.t.match.Try.again", "They don't match. Try again"},
{"Enter.prompt.alias.name.", "Enter {0} alias name: "},
{"Enter.new.alias.name.RETURN.to.cancel.import.for.this.entry.",
"Enter new alias name\t(RETURN to cancel import for this entry): "},
{"Enter.alias.name.", "Enter alias name: "},
{".RETURN.if.same.as.for.otherAlias.",
"\t(RETURN if same as for <{0}>)"},
{".PATTERN.printX509Cert",
"Owner: {0}\nIssuer: {1}\nSerial number: {2}\nValid from: {3} until: {4}\nCertificate fingerprints:\n\t MD5: {5}\n\t SHA1: {6}\n\t SHA256: {7}\n\t Signature algorithm name: {8}\n\t Version: {9}"},
{"What.is.your.first.and.last.name.",
"What is your first and last name?"},
{"What.is.the.name.of.your.organizational.unit.",
"What is the name of your organizational unit?"},
{"What.is.the.name.of.your.organization.",
"What is the name of your organization?"},
{"What.is.the.name.of.your.City.or.Locality.",
"What is the name of your City or Locality?"},
{"What.is.the.name.of.your.State.or.Province.",
"What is the name of your State or Province?"},
{"What.is.the.two.letter.country.code.for.this.unit.",
"What is the two-letter country code for this unit?"},
{"Is.name.correct.", "Is {0} correct?"},
{"no", "no"},
{"yes", "yes"},
{"y", "y"},
{".defaultValue.", " [{0}]: "},
{"Alias.alias.has.no.key",
"Alias <{0}> has no key"},
{"Alias.alias.references.an.entry.type.that.is.not.a.private.key.entry.The.keyclone.command.only.supports.cloning.of.private.key",
"Alias <{0}> references an entry type that is not a private key entry. The -keyclone command only supports cloning of private key entries"},
{".WARNING.WARNING.WARNING.",
"***************** WARNING WARNING WARNING *****************"},
{"Signer.d.", "Signer #%d:"},
{"Timestamp.", "Timestamp:"},
{"Signature.", "Signature:"},
{"CRLs.", "CRLs:"},
{"Certificate.owner.", "Certificate owner: "},
{"Not.a.signed.jar.file", "Not a signed jar file"},
{"No.certificate.from.the.SSL.server",
"No certificate from the SSL server"},
{".The.integrity.of.the.information.stored.in.your.keystore.",
"* The integrity of the information stored in your keystore *\n" +
"* has NOT been verified! In order to verify its integrity, *\n" +
"* you must provide your keystore password. *"},
{".The.integrity.of.the.information.stored.in.the.srckeystore.",
"* The integrity of the information stored in the srckeystore*\n" +
"* has NOT been verified! In order to verify its integrity, *\n" +
"* you must provide the srckeystore password. *"},
{"Certificate.reply.does.not.contain.public.key.for.alias.",
"Certificate reply does not contain public key for <{0}>"},
{"Incomplete.certificate.chain.in.reply",
"Incomplete certificate chain in reply"},
{"Certificate.chain.in.reply.does.not.verify.",
"Certificate chain in reply does not verify: "},
{"Top.level.certificate.in.reply.",
"Top-level certificate in reply:\n"},
{".is.not.trusted.", "... is not trusted. "},
{"Install.reply.anyway.no.", "Install reply anyway? [no]: "},
{"NO", "NO"},
{"Public.keys.in.reply.and.keystore.don.t.match",
"Public keys in reply and keystore don't match"},
{"Certificate.reply.and.certificate.in.keystore.are.identical",
"Certificate reply and certificate in keystore are identical"},
{"Failed.to.establish.chain.from.reply",
"Failed to establish chain from reply"},
{"n", "n"},
{"Wrong.answer.try.again", "Wrong answer, try again"},
{"Secret.key.not.generated.alias.alias.already.exists",
"Secret Key not generated, alias <{0}> already exists"},
{"Please.provide.keysize.for.secret.key.generation",
"Please provide -keysize for secret key generation"},
{"Extensions.", "Extensions: "},
{".Empty.value.", "(Empty value)"},
{"Extension.Request.", "Extension Request:"},
{"PKCS.10.Certificate.Request.Version.1.0.Subject.s.Public.Key.s.format.s.key.",
"PKCS #10 Certificate Request (Version 1.0)\n" +
"Subject: %s\nPublic Key: %s format %s key\n"},
{"Unknown.keyUsage.type.", "Unknown keyUsage type: "},
{"Unknown.extendedkeyUsage.type.", "Unknown extendedkeyUsage type: "},
{"Unknown.AccessDescription.type.", "Unknown AccessDescription type: "},
{"Unrecognized.GeneralName.type.", "Unrecognized GeneralName type: "},
{"This.extension.cannot.be.marked.as.critical.",
"This extension cannot be marked as critical. "},
{"Odd.number.of.hex.digits.found.", "Odd number of hex digits found: "},
{"Unknown.extension.type.", "Unknown extension type: "},
{"command.{0}.is.ambiguous.", "command {0} is ambiguous:"},
// policytool
{"Warning.A.public.key.for.alias.signers.i.does.not.exist.Make.sure.a.KeyStore.is.properly.configured.",
"Warning: A public key for alias {0} does not exist. Make sure a KeyStore is properly configured."},
{"Warning.Class.not.found.class", "Warning: Class not found: {0}"},
{"Warning.Invalid.argument.s.for.constructor.arg",
"Warning: Invalid argument(s) for constructor: {0}"},
{"Illegal.Principal.Type.type", "Illegal Principal Type: {0}"},
{"Illegal.option.option", "Illegal option: {0}"},
{"Usage.policytool.options.", "Usage: policytool [options]"},
{".file.file.policy.file.location",
" [-file <file>] policy file location"},
{"New", "New"},
{"Open", "Open"},
{"Save", "Save"},
{"Save.As", "Save As"},
{"View.Warning.Log", "View Warning Log"},
{"Exit", "Exit"},
{"Add.Policy.Entry", "Add Policy Entry"},
{"Edit.Policy.Entry", "Edit Policy Entry"},
{"Remove.Policy.Entry", "Remove Policy Entry"},
{"Edit", "Edit"},
{"Retain", "Retain"},
{"Warning.File.name.may.include.escaped.backslash.characters.It.is.not.necessary.to.escape.backslash.characters.the.tool.escapes",
"Warning: File name may include escaped backslash characters. " +
"It is not necessary to escape backslash characters " +
"(the tool escapes characters as necessary when writing " +
"the policy contents to the persistent store).\n\n" +
"Click on Retain to retain the entered name, or click on " +
"Edit to edit the name."},
{"Add.Public.Key.Alias", "Add Public Key Alias"},
{"Remove.Public.Key.Alias", "Remove Public Key Alias"},
{"File", "File"},
{"KeyStore", "KeyStore"},
{"Policy.File.", "Policy File:"},
{"Could.not.open.policy.file.policyFile.e.toString.",
"Could not open policy file: {0}: {1}"},
{"Policy.Tool", "Policy Tool"},
{"Errors.have.occurred.while.opening.the.policy.configuration.View.the.Warning.Log.for.more.information.",
"Errors have occurred while opening the policy configuration. View the Warning Log for more information."},
{"Error", "Error"},
{"OK", "OK"},
{"Status", "Status"},
{"Warning", "Warning"},
{"Permission.",
"Permission: "},
{"Principal.Type.", "Principal Type:"},
{"Principal.Name.", "Principal Name:"},
{"Target.Name.",
"Target Name: "},
{"Actions.",
"Actions: "},
{"OK.to.overwrite.existing.file.filename.",
"OK to overwrite existing file {0}?"},
{"Cancel", "Cancel"},
{"CodeBase.", "CodeBase:"},
{"SignedBy.", "SignedBy:"},
{"Add.Principal", "Add Principal"},
{"Edit.Principal", "Edit Principal"},
{"Remove.Principal", "Remove Principal"},
{"Principals.", "Principals:"},
{".Add.Permission", " Add Permission"},
{".Edit.Permission", " Edit Permission"},
{"Remove.Permission", "Remove Permission"},
{"Done", "Done"},
{"KeyStore.URL.", "KeyStore URL:"},
{"KeyStore.Type.", "KeyStore Type:"},
{"KeyStore.Provider.", "KeyStore Provider:"},
{"KeyStore.Password.URL.", "KeyStore Password URL:"},
{"Principals", "Principals"},
{".Edit.Principal.", " Edit Principal:"},
{".Add.New.Principal.", " Add New Principal:"},
{"Permissions", "Permissions"},
{".Edit.Permission.", " Edit Permission:"},
{".Add.New.Permission.", " Add New Permission:"},
{"Signed.By.", "Signed By:"},
{"Cannot.Specify.Principal.with.a.Wildcard.Class.without.a.Wildcard.Name",
"Cannot Specify Principal with a Wildcard Class without a Wildcard Name"},
{"Cannot.Specify.Principal.without.a.Name",
"Cannot Specify Principal without a Name"},
{"Permission.and.Target.Name.must.have.a.value",
"Permission and Target Name must have a value"},
{"Remove.this.Policy.Entry.", "Remove this Policy Entry?"},
{"Overwrite.File", "Overwrite File"},
{"Policy.successfully.written.to.filename",
"Policy successfully written to {0}"},
{"null.filename", "null filename"},
{"Save.changes.", "Save changes?"},
{"Yes", "Yes"},
{"No", "No"},
{"Policy.Entry", "Policy Entry"},
{"Save.Changes", "Save Changes"},
{"No.Policy.Entry.selected", "No Policy Entry selected"},
{"Unable.to.open.KeyStore.ex.toString.",
"Unable to open KeyStore: {0}"},
{"No.principal.selected", "No principal selected"},
{"No.permission.selected", "No permission selected"},
{"name", "name"},
{"configuration.type", "configuration type"},
{"environment.variable.name", "environment variable name"},
{"library.name", "library name"},
{"package.name", "package name"},
{"policy.type", "policy type"},
{"property.name", "property name"},
{"Principal.List", "Principal List"},
{"Permission.List", "Permission List"},
{"Code.Base", "Code Base"},
{"KeyStore.U.R.L.", "KeyStore U R L:"},
{"KeyStore.Password.U.R.L.", "KeyStore Password U R L:"},
// javax.security.auth.PrivateCredentialPermission
{"invalid.null.input.s.", "invalid null input(s)"},
{"actions.can.only.be.read.", "actions can only be 'read'"},
{"permission.name.name.syntax.invalid.",
"permission name [{0}] syntax invalid: "},
{"Credential.Class.not.followed.by.a.Principal.Class.and.Name",
"Credential Class not followed by a Principal Class and Name"},
{"Principal.Class.not.followed.by.a.Principal.Name",
"Principal Class not followed by a Principal Name"},
{"Principal.Name.must.be.surrounded.by.quotes",
"Principal Name must be surrounded by quotes"},
{"Principal.Name.missing.end.quote",
"Principal Name missing end quote"},
{"PrivateCredentialPermission.Principal.Class.can.not.be.a.wildcard.value.if.Principal.Name.is.not.a.wildcard.value",
"PrivateCredentialPermission Principal Class can not be a wildcard (*) value if Principal Name is not a wildcard (*) value"},
{"CredOwner.Principal.Class.class.Principal.Name.name",
"CredOwner:\n\tPrincipal Class = {0}\n\tPrincipal Name = {1}"},
// javax.security.auth.x500
{"provided.null.name", "provided null name"},
{"provided.null.keyword.map", "provided null keyword map"},
{"provided.null.OID.map", "provided null OID map"},
// javax.security.auth.Subject
{"invalid.null.AccessControlContext.provided",
"invalid null AccessControlContext provided"},
{"invalid.null.action.provided", "invalid null action provided"},
{"invalid.null.Class.provided", "invalid null Class provided"},
{"Subject.", "Subject:\n"},
{".Principal.", "\tPrincipal: "},
{".Public.Credential.", "\tPublic Credential: "},
{".Private.Credentials.inaccessible.",
"\tPrivate Credentials inaccessible\n"},
{".Private.Credential.", "\tPrivate Credential: "},
{".Private.Credential.inaccessible.",
"\tPrivate Credential inaccessible\n"},
{"Subject.is.read.only", "Subject is read-only"},
{"attempting.to.add.an.object.which.is.not.an.instance.of.java.security.Principal.to.a.Subject.s.Principal.Set",
"attempting to add an object which is not an instance of java.security.Principal to a Subject's Principal Set"},
{"attempting.to.add.an.object.which.is.not.an.instance.of.class",
"attempting to add an object which is not an instance of {0}"},
// javax.security.auth.login.AppConfigurationEntry
{"LoginModuleControlFlag.", "LoginModuleControlFlag: "},
// javax.security.auth.login.LoginContext
{"Invalid.null.input.name", "Invalid null input: name"},
{"No.LoginModules.configured.for.name",
"No LoginModules configured for {0}"},
{"invalid.null.Subject.provided", "invalid null Subject provided"},
{"invalid.null.CallbackHandler.provided",
"invalid null CallbackHandler provided"},
{"null.subject.logout.called.before.login",
"null subject - logout called before login"},
{"unable.to.instantiate.LoginModule.module.because.it.does.not.provide.a.no.argument.constructor",
"unable to instantiate LoginModule, {0}, because it does not provide a no-argument constructor"},
{"unable.to.instantiate.LoginModule",
"unable to instantiate LoginModule"},
{"unable.to.instantiate.LoginModule.",
"unable to instantiate LoginModule: "},
{"unable.to.find.LoginModule.class.",
"unable to find LoginModule class: "},
{"unable.to.access.LoginModule.",
"unable to access LoginModule: "},
{"Login.Failure.all.modules.ignored",
"Login Failure: all modules ignored"},
// sun.security.provider.PolicyFile
{"java.security.policy.error.parsing.policy.message",
"java.security.policy: error parsing {0}:\n\t{1}"},
{"java.security.policy.error.adding.Permission.perm.message",
"java.security.policy: error adding Permission, {0}:\n\t{1}"},
{"java.security.policy.error.adding.Entry.message",
"java.security.policy: error adding Entry:\n\t{0}"},
{"alias.name.not.provided.pe.name.", "alias name not provided ({0})"},
{"unable.to.perform.substitution.on.alias.suffix",
"unable to perform substitution on alias, {0}"},
{"substitution.value.prefix.unsupported",
"substitution value, {0}, unsupported"},
{"LPARAM", "("},
{"RPARAM", ")"},
{"type.can.t.be.null","type can't be null"},
// sun.security.provider.PolicyParser
{"keystorePasswordURL.can.not.be.specified.without.also.specifying.keystore",
"keystorePasswordURL can not be specified without also specifying keystore"},
{"expected.keystore.type", "expected keystore type"},
{"expected.keystore.provider", "expected keystore provider"},
{"multiple.Codebase.expressions",
"multiple Codebase expressions"},
{"multiple.SignedBy.expressions","multiple SignedBy expressions"},
{"SignedBy.has.empty.alias","SignedBy has empty alias"},
{"can.not.specify.Principal.with.a.wildcard.class.without.a.wildcard.name",
"can not specify Principal with a wildcard class without a wildcard name"},
{"expected.codeBase.or.SignedBy.or.Principal",
"expected codeBase or SignedBy or Principal"},
{"expected.permission.entry", "expected permission entry"},
{"number.", "number "},
{"expected.expect.read.end.of.file.",
"expected [{0}], read [end of file]"},
{"expected.read.end.of.file.",
"expected [;], read [end of file]"},
{"line.number.msg", "line {0}: {1}"},
{"line.number.expected.expect.found.actual.",
"line {0}: expected [{1}], found [{2}]"},
{"null.principalClass.or.principalName",
"null principalClass or principalName"},
// sun.security.pkcs11.SunPKCS11
{"PKCS11.Token.providerName.Password.",
"PKCS11 Token [{0}] Password: "},
/* --- DEPRECATED --- */
// javax.security.auth.Policy
{"unable.to.instantiate.Subject.based.policy",
"unable to instantiate Subject-based policy"}
};
/**
 * Returns the contents of this <code>ResourceBundle</code>.
 *
 * <p>Each element of the returned array is a two-element pair: the lookup key used by the
 * keytool/policytool and auth/security classes, followed by the corresponding English
 * message text (possibly a {@code MessageFormat} pattern with {0}, {1}, ... placeholders).
 *
 * @return the contents of this <code>ResourceBundle</code>.
 */
public Object[][] getContents() {
    return contents;
}
}
|
googleapis/google-cloud-java | 37,710 | java-aiplatform/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/ListCustomJobsResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/aiplatform/v1beta1/job_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.aiplatform.v1beta1;
/**
*
*
* <pre>
* Response message for
* [JobService.ListCustomJobs][google.cloud.aiplatform.v1beta1.JobService.ListCustomJobs]
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1beta1.ListCustomJobsResponse}
*/
public final class ListCustomJobsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1beta1.ListCustomJobsResponse)
ListCustomJobsResponseOrBuilder {
// Fixed serialization version for the generated message class.
private static final long serialVersionUID = 0L;

// Use ListCustomJobsResponse.newBuilder() to construct.
/**
 * Builder-based constructor; all field population is delegated to the
 * {@code GeneratedMessageV3} superclass.
 */
private ListCustomJobsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}
/** No-arg constructor: initializes every field to its proto3 default value. */
private ListCustomJobsResponse() {
  // Defaults: empty page token and an immutable empty job list.
  nextPageToken_ = "";
  customJobs_ = java.util.Collections.emptyList();
}
/**
 * Protobuf-runtime hook that creates a fresh, empty instance of this message;
 * the single parameter is ignored (hence the {@code "unused"} suppression).
 */
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  return new ListCustomJobsResponse();
}
/**
 * Returns the protobuf {@code Descriptor} for the
 * {@code google.cloud.aiplatform.v1beta1.ListCustomJobsResponse} message type.
 */
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.aiplatform.v1beta1.JobServiceProto
      .internal_static_google_cloud_aiplatform_v1beta1_ListCustomJobsResponse_descriptor;
}
/**
 * Binds the generated reflection table to this message class and its Builder so the
 * protobuf runtime can read and write fields via their descriptors.
 */
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.cloud.aiplatform.v1beta1.JobServiceProto
      .internal_static_google_cloud_aiplatform_v1beta1_ListCustomJobsResponse_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.aiplatform.v1beta1.ListCustomJobsResponse.class,
          com.google.cloud.aiplatform.v1beta1.ListCustomJobsResponse.Builder.class);
}
// Wire/field number of the repeated custom_jobs field.
public static final int CUSTOM_JOBS_FIELD_NUMBER = 1;

// Backing list for custom_jobs; the no-arg constructor initializes it to an empty list.
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.aiplatform.v1beta1.CustomJob> customJobs_;

/**
 * List of CustomJobs in the requested page.
 *
 * <code>repeated .google.cloud.aiplatform.v1beta1.CustomJob custom_jobs = 1;</code>
 *
 * @return the internal backing list; treat it as read-only.
 */
@java.lang.Override
public java.util.List<com.google.cloud.aiplatform.v1beta1.CustomJob> getCustomJobsList() {
  return customJobs_;
}
/**
 * List of CustomJobs in the requested page.
 *
 * <code>repeated .google.cloud.aiplatform.v1beta1.CustomJob custom_jobs = 1;</code>
 *
 * @return the same internal backing list, widened to the message-or-builder view;
 *     treat it as read-only.
 */
@java.lang.Override
public java.util.List<? extends com.google.cloud.aiplatform.v1beta1.CustomJobOrBuilder>
    getCustomJobsOrBuilderList() {
  return customJobs_;
}
/**
 * List of CustomJobs in the requested page.
 *
 * <code>repeated .google.cloud.aiplatform.v1beta1.CustomJob custom_jobs = 1;</code>
 *
 * @return the number of custom_jobs entries in this page.
 */
@java.lang.Override
public int getCustomJobsCount() {
  return customJobs_.size();
}
/**
 * List of CustomJobs in the requested page.
 *
 * <code>repeated .google.cloud.aiplatform.v1beta1.CustomJob custom_jobs = 1;</code>
 *
 * @param index zero-based position within the custom_jobs list
 * @return the CustomJob at {@code index}
 * @throws IndexOutOfBoundsException if {@code index} is out of range
 */
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.CustomJob getCustomJobs(int index) {
  return customJobs_.get(index);
}
/**
 * List of CustomJobs in the requested page.
 *
 * <code>repeated .google.cloud.aiplatform.v1beta1.CustomJob custom_jobs = 1;</code>
 *
 * @param index zero-based position within the custom_jobs list
 * @return the entry at {@code index}, viewed through the message-or-builder interface
 * @throws IndexOutOfBoundsException if {@code index} is out of range
 */
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.CustomJobOrBuilder getCustomJobsOrBuilder(int index) {
  return customJobs_.get(index);
}
// Wire/field number of the next_page_token field.
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;

// Holds either a java.lang.String or a com.google.protobuf.ByteString; the accessors
// lazily convert between the two representations and cache the result back here.
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
 * A token to retrieve the next page of results. Pass to
 * {@code ListCustomJobsRequest.page_token} to obtain that page.
 *
 * <code>string next_page_token = 2;</code>
 *
 * @return The nextPageToken.
 */
@java.lang.Override
public java.lang.String getNextPageToken() {
  // Lazy decode: the field holds a ByteString until the first String access,
  // after which the decoded value is cached back into the field.
  java.lang.Object raw = nextPageToken_;
  if (!(raw instanceof java.lang.String)) {
    java.lang.String decoded = ((com.google.protobuf.ByteString) raw).toStringUtf8();
    nextPageToken_ = decoded;
    return decoded;
  }
  return (java.lang.String) raw;
}
/**
 * A token to retrieve the next page of results. Pass to
 * {@code ListCustomJobsRequest.page_token} to obtain that page.
 *
 * <code>string next_page_token = 2;</code>
 *
 * @return The bytes for nextPageToken.
 */
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
  // Lazy encode: cache the UTF-8 ByteString form when the field currently holds a String.
  java.lang.Object raw = nextPageToken_;
  if (!(raw instanceof java.lang.String)) {
    return (com.google.protobuf.ByteString) raw;
  }
  com.google.protobuf.ByteString encoded =
      com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) raw);
  nextPageToken_ = encoded;
  return encoded;
}
private byte memoizedIsInitialized = -1;
/**
 * Reports whether all required invariants hold for this message; the result is
 * memoized in {@code memoizedIsInitialized}.
 */
@java.lang.Override
public final boolean isInitialized() {
  // Tri-state memo: -1 = unknown, 0 = known-false, 1 = known-true.
  switch (memoizedIsInitialized) {
    case 1:
      return true;
    case 0:
      return false;
    default:
      // Nothing to validate for this message, so it always memoizes as initialized.
      memoizedIsInitialized = 1;
      return true;
  }
}
/**
 * Serializes this message to {@code output} in protobuf wire format.
 *
 * @throws java.io.IOException if the underlying stream fails
 */
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  // Field 1: repeated CustomJob messages, emitted in list order.
  for (com.google.cloud.aiplatform.v1beta1.CustomJob job : customJobs_) {
    output.writeMessage(1, job);
  }
  // Field 2: next_page_token, skipped when empty (proto3 default).
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
  }
  // Preserve any fields that were unknown at parse time.
  getUnknownFields().writeTo(output);
}
  @java.lang.Override
  public int getSerializedSize() {
    // Memoized: -1 means "not computed yet"; the message is immutable so the
    // size can be cached after the first computation.
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    for (int i = 0; i < customJobs_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, customJobs_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.aiplatform.v1beta1.ListCustomJobsResponse)) {
      return super.equals(obj);
    }
    com.google.cloud.aiplatform.v1beta1.ListCustomJobsResponse other =
        (com.google.cloud.aiplatform.v1beta1.ListCustomJobsResponse) obj;

    // Field-by-field comparison, including unknown fields, per protobuf equality semantics.
    if (!getCustomJobsList().equals(other.getCustomJobsList())) return false;
    if (!getNextPageToken().equals(other.getNextPageToken())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    // Memoized: 0 means "not computed yet" (generated code reserves 0 as the sentinel).
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    // Repeated fields only contribute when non-empty, keeping hashCode consistent
    // with equals() for default instances.
    if (getCustomJobsCount() > 0) {
      hash = (37 * hash) + CUSTOM_JOBS_FIELD_NUMBER;
      hash = (53 * hash) + getCustomJobsList().hashCode();
    }
    hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getNextPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points: one overload per input source
  // (ByteBuffer, ByteString, byte[], InputStream, CodedInputStream), each with
  // and without an ExtensionRegistryLite. All delegate to PARSER.
  public static com.google.cloud.aiplatform.v1beta1.ListCustomJobsResponse parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.aiplatform.v1beta1.ListCustomJobsResponse parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.aiplatform.v1beta1.ListCustomJobsResponse parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.aiplatform.v1beta1.ListCustomJobsResponse parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.aiplatform.v1beta1.ListCustomJobsResponse parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.aiplatform.v1beta1.ListCustomJobsResponse parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.aiplatform.v1beta1.ListCustomJobsResponse parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.aiplatform.v1beta1.ListCustomJobsResponse parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  // Delimited variants read a varint length prefix before the message payload.
  public static com.google.cloud.aiplatform.v1beta1.ListCustomJobsResponse parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.aiplatform.v1beta1.ListCustomJobsResponse parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.aiplatform.v1beta1.ListCustomJobsResponse parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.aiplatform.v1beta1.ListCustomJobsResponse parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  // Builders are created from the default instance so they share descriptor state.
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.cloud.aiplatform.v1beta1.ListCustomJobsResponse prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    // Avoid a needless merge when converting the default instance to a builder.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * Response message for
   * [JobService.ListCustomJobs][google.cloud.aiplatform.v1beta1.JobService.ListCustomJobs]
   * </pre>
   *
   * Protobuf type {@code google.cloud.aiplatform.v1beta1.ListCustomJobsResponse}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1beta1.ListCustomJobsResponse)
      com.google.cloud.aiplatform.v1beta1.ListCustomJobsResponseOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.aiplatform.v1beta1.JobServiceProto
          .internal_static_google_cloud_aiplatform_v1beta1_ListCustomJobsResponse_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.aiplatform.v1beta1.JobServiceProto
          .internal_static_google_cloud_aiplatform_v1beta1_ListCustomJobsResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.aiplatform.v1beta1.ListCustomJobsResponse.class,
              com.google.cloud.aiplatform.v1beta1.ListCustomJobsResponse.Builder.class);
    }

    // Construct using com.google.cloud.aiplatform.v1beta1.ListCustomJobsResponse.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      // bitField0_ tracks which fields have been explicitly set:
      // bit 0 = custom_jobs list is mutable/owned, bit 1 = next_page_token set.
      bitField0_ = 0;
      if (customJobsBuilder_ == null) {
        customJobs_ = java.util.Collections.emptyList();
      } else {
        customJobs_ = null;
        customJobsBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      nextPageToken_ = "";
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.aiplatform.v1beta1.JobServiceProto
          .internal_static_google_cloud_aiplatform_v1beta1_ListCustomJobsResponse_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.aiplatform.v1beta1.ListCustomJobsResponse getDefaultInstanceForType() {
      return com.google.cloud.aiplatform.v1beta1.ListCustomJobsResponse.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.aiplatform.v1beta1.ListCustomJobsResponse build() {
      com.google.cloud.aiplatform.v1beta1.ListCustomJobsResponse result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.aiplatform.v1beta1.ListCustomJobsResponse buildPartial() {
      com.google.cloud.aiplatform.v1beta1.ListCustomJobsResponse result =
          new com.google.cloud.aiplatform.v1beta1.ListCustomJobsResponse(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    private void buildPartialRepeatedFields(
        com.google.cloud.aiplatform.v1beta1.ListCustomJobsResponse result) {
      if (customJobsBuilder_ == null) {
        // The builder owned a mutable list: freeze it before handing it to the
        // immutable message, and drop the ownership bit.
        if (((bitField0_ & 0x00000001) != 0)) {
          customJobs_ = java.util.Collections.unmodifiableList(customJobs_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.customJobs_ = customJobs_;
      } else {
        result.customJobs_ = customJobsBuilder_.build();
      }
    }

    private void buildPartial0(com.google.cloud.aiplatform.v1beta1.ListCustomJobsResponse result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.nextPageToken_ = nextPageToken_;
      }
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.aiplatform.v1beta1.ListCustomJobsResponse) {
        return mergeFrom((com.google.cloud.aiplatform.v1beta1.ListCustomJobsResponse) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.cloud.aiplatform.v1beta1.ListCustomJobsResponse other) {
      if (other == com.google.cloud.aiplatform.v1beta1.ListCustomJobsResponse.getDefaultInstance())
        return this;
      if (customJobsBuilder_ == null) {
        // List mode: if our list is still empty we can alias the other message's
        // immutable list (copy-on-write via ensureCustomJobsIsMutable); otherwise append.
        if (!other.customJobs_.isEmpty()) {
          if (customJobs_.isEmpty()) {
            customJobs_ = other.customJobs_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureCustomJobsIsMutable();
            customJobs_.addAll(other.customJobs_);
          }
          onChanged();
        }
      } else {
        // Builder mode: if the nested builder is empty, discard it and alias the
        // incoming list; otherwise merge message-by-message.
        if (!other.customJobs_.isEmpty()) {
          if (customJobsBuilder_.isEmpty()) {
            customJobsBuilder_.dispose();
            customJobsBuilder_ = null;
            customJobs_ = other.customJobs_;
            bitField0_ = (bitField0_ & ~0x00000001);
            customJobsBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getCustomJobsFieldBuilder()
                    : null;
          } else {
            customJobsBuilder_.addAllMessages(other.customJobs_);
          }
        }
      }
      if (!other.getNextPageToken().isEmpty()) {
        nextPageToken_ = other.nextPageToken_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          // Tags are (field_number << 3) | wire_type:
          // 10 = field 1 length-delimited (custom_jobs), 18 = field 2 (next_page_token).
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                com.google.cloud.aiplatform.v1beta1.CustomJob m =
                    input.readMessage(
                        com.google.cloud.aiplatform.v1beta1.CustomJob.parser(), extensionRegistry);
                if (customJobsBuilder_ == null) {
                  ensureCustomJobsIsMutable();
                  customJobs_.add(m);
                } else {
                  customJobsBuilder_.addMessage(m);
                }
                break;
              } // case 10
            case 18:
              {
                nextPageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    private int bitField0_;

    private java.util.List<com.google.cloud.aiplatform.v1beta1.CustomJob> customJobs_ =
        java.util.Collections.emptyList();

    // Copy-on-write guard: bit 0 of bitField0_ records whether customJobs_ is a
    // private mutable ArrayList (set) or a possibly-shared immutable list (clear).
    private void ensureCustomJobsIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        customJobs_ =
            new java.util.ArrayList<com.google.cloud.aiplatform.v1beta1.CustomJob>(customJobs_);
        bitField0_ |= 0x00000001;
      }
    }

    // When non-null, the field operates in "builder mode" and customJobs_ is unused.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.aiplatform.v1beta1.CustomJob,
            com.google.cloud.aiplatform.v1beta1.CustomJob.Builder,
            com.google.cloud.aiplatform.v1beta1.CustomJobOrBuilder>
        customJobsBuilder_;

    /**
     *
     *
     * <pre>
     * List of CustomJobs in the requested page.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1beta1.CustomJob custom_jobs = 1;</code>
     */
    public java.util.List<com.google.cloud.aiplatform.v1beta1.CustomJob> getCustomJobsList() {
      if (customJobsBuilder_ == null) {
        return java.util.Collections.unmodifiableList(customJobs_);
      } else {
        return customJobsBuilder_.getMessageList();
      }
    }

    /**
     *
     *
     * <pre>
     * List of CustomJobs in the requested page.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1beta1.CustomJob custom_jobs = 1;</code>
     */
    public int getCustomJobsCount() {
      if (customJobsBuilder_ == null) {
        return customJobs_.size();
      } else {
        return customJobsBuilder_.getCount();
      }
    }

    /**
     *
     *
     * <pre>
     * List of CustomJobs in the requested page.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1beta1.CustomJob custom_jobs = 1;</code>
     */
    public com.google.cloud.aiplatform.v1beta1.CustomJob getCustomJobs(int index) {
      if (customJobsBuilder_ == null) {
        return customJobs_.get(index);
      } else {
        return customJobsBuilder_.getMessage(index);
      }
    }

    /**
     *
     *
     * <pre>
     * List of CustomJobs in the requested page.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1beta1.CustomJob custom_jobs = 1;</code>
     */
    public Builder setCustomJobs(int index, com.google.cloud.aiplatform.v1beta1.CustomJob value) {
      if (customJobsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureCustomJobsIsMutable();
        customJobs_.set(index, value);
        onChanged();
      } else {
        customJobsBuilder_.setMessage(index, value);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * List of CustomJobs in the requested page.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1beta1.CustomJob custom_jobs = 1;</code>
     */
    public Builder setCustomJobs(
        int index, com.google.cloud.aiplatform.v1beta1.CustomJob.Builder builderForValue) {
      if (customJobsBuilder_ == null) {
        ensureCustomJobsIsMutable();
        customJobs_.set(index, builderForValue.build());
        onChanged();
      } else {
        customJobsBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * List of CustomJobs in the requested page.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1beta1.CustomJob custom_jobs = 1;</code>
     */
    public Builder addCustomJobs(com.google.cloud.aiplatform.v1beta1.CustomJob value) {
      if (customJobsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureCustomJobsIsMutable();
        customJobs_.add(value);
        onChanged();
      } else {
        customJobsBuilder_.addMessage(value);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * List of CustomJobs in the requested page.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1beta1.CustomJob custom_jobs = 1;</code>
     */
    public Builder addCustomJobs(int index, com.google.cloud.aiplatform.v1beta1.CustomJob value) {
      if (customJobsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureCustomJobsIsMutable();
        customJobs_.add(index, value);
        onChanged();
      } else {
        customJobsBuilder_.addMessage(index, value);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * List of CustomJobs in the requested page.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1beta1.CustomJob custom_jobs = 1;</code>
     */
    public Builder addCustomJobs(
        com.google.cloud.aiplatform.v1beta1.CustomJob.Builder builderForValue) {
      if (customJobsBuilder_ == null) {
        ensureCustomJobsIsMutable();
        customJobs_.add(builderForValue.build());
        onChanged();
      } else {
        customJobsBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * List of CustomJobs in the requested page.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1beta1.CustomJob custom_jobs = 1;</code>
     */
    public Builder addCustomJobs(
        int index, com.google.cloud.aiplatform.v1beta1.CustomJob.Builder builderForValue) {
      if (customJobsBuilder_ == null) {
        ensureCustomJobsIsMutable();
        customJobs_.add(index, builderForValue.build());
        onChanged();
      } else {
        customJobsBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * List of CustomJobs in the requested page.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1beta1.CustomJob custom_jobs = 1;</code>
     */
    public Builder addAllCustomJobs(
        java.lang.Iterable<? extends com.google.cloud.aiplatform.v1beta1.CustomJob> values) {
      if (customJobsBuilder_ == null) {
        ensureCustomJobsIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, customJobs_);
        onChanged();
      } else {
        customJobsBuilder_.addAllMessages(values);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * List of CustomJobs in the requested page.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1beta1.CustomJob custom_jobs = 1;</code>
     */
    public Builder clearCustomJobs() {
      if (customJobsBuilder_ == null) {
        customJobs_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        customJobsBuilder_.clear();
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * List of CustomJobs in the requested page.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1beta1.CustomJob custom_jobs = 1;</code>
     */
    public Builder removeCustomJobs(int index) {
      if (customJobsBuilder_ == null) {
        ensureCustomJobsIsMutable();
        customJobs_.remove(index);
        onChanged();
      } else {
        customJobsBuilder_.remove(index);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * List of CustomJobs in the requested page.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1beta1.CustomJob custom_jobs = 1;</code>
     */
    public com.google.cloud.aiplatform.v1beta1.CustomJob.Builder getCustomJobsBuilder(int index) {
      return getCustomJobsFieldBuilder().getBuilder(index);
    }

    /**
     *
     *
     * <pre>
     * List of CustomJobs in the requested page.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1beta1.CustomJob custom_jobs = 1;</code>
     */
    public com.google.cloud.aiplatform.v1beta1.CustomJobOrBuilder getCustomJobsOrBuilder(
        int index) {
      if (customJobsBuilder_ == null) {
        return customJobs_.get(index);
      } else {
        return customJobsBuilder_.getMessageOrBuilder(index);
      }
    }

    /**
     *
     *
     * <pre>
     * List of CustomJobs in the requested page.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1beta1.CustomJob custom_jobs = 1;</code>
     */
    public java.util.List<? extends com.google.cloud.aiplatform.v1beta1.CustomJobOrBuilder>
        getCustomJobsOrBuilderList() {
      if (customJobsBuilder_ != null) {
        return customJobsBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(customJobs_);
      }
    }

    /**
     *
     *
     * <pre>
     * List of CustomJobs in the requested page.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1beta1.CustomJob custom_jobs = 1;</code>
     */
    public com.google.cloud.aiplatform.v1beta1.CustomJob.Builder addCustomJobsBuilder() {
      return getCustomJobsFieldBuilder()
          .addBuilder(com.google.cloud.aiplatform.v1beta1.CustomJob.getDefaultInstance());
    }

    /**
     *
     *
     * <pre>
     * List of CustomJobs in the requested page.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1beta1.CustomJob custom_jobs = 1;</code>
     */
    public com.google.cloud.aiplatform.v1beta1.CustomJob.Builder addCustomJobsBuilder(int index) {
      return getCustomJobsFieldBuilder()
          .addBuilder(index, com.google.cloud.aiplatform.v1beta1.CustomJob.getDefaultInstance());
    }

    /**
     *
     *
     * <pre>
     * List of CustomJobs in the requested page.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1beta1.CustomJob custom_jobs = 1;</code>
     */
    public java.util.List<com.google.cloud.aiplatform.v1beta1.CustomJob.Builder>
        getCustomJobsBuilderList() {
      return getCustomJobsFieldBuilder().getBuilderList();
    }

    // Lazily switches the repeated field from "list mode" to "builder mode";
    // after this call customJobs_ is null and all access goes through the builder.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.aiplatform.v1beta1.CustomJob,
            com.google.cloud.aiplatform.v1beta1.CustomJob.Builder,
            com.google.cloud.aiplatform.v1beta1.CustomJobOrBuilder>
        getCustomJobsFieldBuilder() {
      if (customJobsBuilder_ == null) {
        customJobsBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.cloud.aiplatform.v1beta1.CustomJob,
                com.google.cloud.aiplatform.v1beta1.CustomJob.Builder,
                com.google.cloud.aiplatform.v1beta1.CustomJobOrBuilder>(
                customJobs_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
        customJobs_ = null;
      }
      return customJobsBuilder_;
    }

    private java.lang.Object nextPageToken_ = "";

    /**
     *
     *
     * <pre>
     * A token to retrieve the next page of results.
     * Pass to
     * [ListCustomJobsRequest.page_token][google.cloud.aiplatform.v1beta1.ListCustomJobsRequest.page_token]
     * to obtain that page.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The nextPageToken.
     */
    public java.lang.String getNextPageToken() {
      java.lang.Object ref = nextPageToken_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        nextPageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * A token to retrieve the next page of results.
     * Pass to
     * [ListCustomJobsRequest.page_token][google.cloud.aiplatform.v1beta1.ListCustomJobsRequest.page_token]
     * to obtain that page.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The bytes for nextPageToken.
     */
    public com.google.protobuf.ByteString getNextPageTokenBytes() {
      java.lang.Object ref = nextPageToken_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        nextPageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * A token to retrieve the next page of results.
     * Pass to
     * [ListCustomJobsRequest.page_token][google.cloud.aiplatform.v1beta1.ListCustomJobsRequest.page_token]
     * to obtain that page.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * A token to retrieve the next page of results.
     * Pass to
     * [ListCustomJobsRequest.page_token][google.cloud.aiplatform.v1beta1.ListCustomJobsRequest.page_token]
     * to obtain that page.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearNextPageToken() {
      nextPageToken_ = getDefaultInstance().getNextPageToken();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * A token to retrieve the next page of results.
     * Pass to
     * [ListCustomJobsRequest.page_token][google.cloud.aiplatform.v1beta1.ListCustomJobsRequest.page_token]
     * to obtain that page.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The bytes for nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1beta1.ListCustomJobsResponse)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1beta1.ListCustomJobsResponse)
  // Singleton default instance shared by all callers of getDefaultInstance().
  private static final com.google.cloud.aiplatform.v1beta1.ListCustomJobsResponse DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1beta1.ListCustomJobsResponse();
  }

  public static com.google.cloud.aiplatform.v1beta1.ListCustomJobsResponse getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<ListCustomJobsResponse> PARSER =
      new com.google.protobuf.AbstractParser<ListCustomJobsResponse>() {
        @java.lang.Override
        public ListCustomJobsResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            // Attach the partially parsed message so callers can inspect what was read.
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<ListCustomJobsResponse> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<ListCustomJobsResponse> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.aiplatform.v1beta1.ListCustomJobsResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
// ---------------------------------------------------------------------------
// File boundary artifact: the content below belongs to a different source file
// (apache/flink: flink-tests/src/test/java/org/apache/flink/test/checkpointing/EventTimeWindowCheckpointingITCase.java)
// ---------------------------------------------------------------------------
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.test.checkpointing;
import org.apache.flink.api.common.functions.OpenContext;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.tuple.Tuple4;
import org.apache.flink.changelog.fs.FsStateChangelogStorageFactory;
import org.apache.flink.configuration.CheckpointingOptions;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.HighAvailabilityOptions;
import org.apache.flink.configuration.MemorySize;
import org.apache.flink.configuration.RpcOptions;
import org.apache.flink.configuration.StateBackendOptions;
import org.apache.flink.configuration.TaskManagerOptions;
import org.apache.flink.core.fs.Path;
import org.apache.flink.runtime.testutils.MiniClusterResourceConfiguration;
import org.apache.flink.runtime.testutils.ZooKeeperTestUtils;
import org.apache.flink.state.forst.ForStOptions;
import org.apache.flink.state.forst.ForStStateBackend;
import org.apache.flink.state.rocksdb.EmbeddedRocksDBStateBackend;
import org.apache.flink.state.rocksdb.RocksDBOptions;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.source.legacy.SourceFunction;
import org.apache.flink.streaming.api.functions.windowing.RichWindowFunction;
import org.apache.flink.streaming.api.watermark.Watermark;
import org.apache.flink.streaming.api.windowing.assigners.SlidingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.streaming.util.RestartStrategyUtils;
import org.apache.flink.test.checkpointing.utils.FailingSource;
import org.apache.flink.test.checkpointing.utils.IntType;
import org.apache.flink.test.checkpointing.utils.ValidatingSink;
import org.apache.flink.test.util.MiniClusterWithClientResource;
import org.apache.flink.util.Collector;
import org.apache.flink.util.TestLogger;
import org.apache.curator.test.TestingServer;
import org.junit.After;
import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import org.junit.rules.TestName;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import java.io.File;
import java.io.IOException;
import java.time.Duration;
import java.util.Arrays;
import java.util.Collection;
import java.util.Map;
import java.util.stream.Collectors;
import static org.apache.flink.test.checkpointing.EventTimeWindowCheckpointingITCase.StateBackendEnum.ROCKSDB_INCREMENTAL_ZK;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
/**
* This verifies that checkpointing works correctly with event time windows. This is more strict
* than {@link ProcessingTimeWindowCheckpointingITCase} because for event-time the contents of the
* emitted windows are deterministic.
*
* <p>Split into multiple test classes in order to decrease the runtime per backend and not run into
* CI infrastructure limits like no std output being emitted for I/O heavy variants.
*/
@SuppressWarnings("serial")
@RunWith(Parameterized.class)
public class EventTimeWindowCheckpointingITCase extends TestLogger {
    // Upper bound for in-memory state (bytes) when the heap/"jobmanager" checkpoint
    // storage is used; declared here but applied where the cluster config is built.
    private static final int MAX_MEM_STATE_SIZE = 20 * 1024 * 1024;

    // Total job parallelism, spread evenly over the task managers below.
    private static final int PARALLELISM = 4;

    private static final int NUM_OF_TASK_MANAGERS = 2;

    // Only started for the ZooKeeper-HA variant (ROCKSDB_INCREMENTAL_ZK).
    private TestingServer zkServer;

    public MiniClusterWithClientResource miniClusterResource;

    @ClassRule public static TemporaryFolder tempFolder = new TemporaryFolder();

    @Rule public TestName name = new TestName();

    private Configuration configuration;

    // The backend variant under test, injected by the Parameterized runner.
    public StateBackendEnum stateBackendEnum;
enum StateBackendEnum {
MEM,
FILE,
ROCKSDB_FULL,
ROCKSDB_INCREMENTAL,
ROCKSDB_INCREMENTAL_ZK,
FORST_INCREMENTAL
}
@Parameterized.Parameters(name = "statebackend type ={0}")
public static Collection<Object[]> parameter() {
return Arrays.stream(StateBackendEnum.values())
.map((type) -> new Object[][] {{type}})
.flatMap(Arrays::stream)
.collect(Collectors.toList());
}
    /** Invoked by the Parameterized runner with one of the {@link StateBackendEnum} values. */
    public EventTimeWindowCheckpointingITCase(StateBackendEnum stateBackendEnum) {
        this.stateBackendEnum = stateBackendEnum;
    }
    /** Returns the backend variant under test; overridable hook for subclasses. */
    protected StateBackendEnum getStateBackend() {
        return this.stateBackendEnum;
    }
    /**
     * Builds the mini-cluster resource from the current {@code configuration}: {@code
     * NUM_OF_TASK_MANAGERS} task managers with the parallelism split evenly across them.
     */
    protected final MiniClusterWithClientResource getMiniClusterResource() {
        return new MiniClusterWithClientResource(
                new MiniClusterResourceConfiguration.Builder()
                        .setConfiguration(configuration)
                        .setNumberTaskManagers(NUM_OF_TASK_MANAGERS)
                        .setNumberSlotsPerTaskManager(PARALLELISM / NUM_OF_TASK_MANAGERS)
                        .build());
    }
private Configuration getConfigurationSafe() {
try {
return getConfiguration();
} catch (Exception e) {
throw new AssertionError("Could not initialize test.", e);
}
}
    /**
     * Creates the cluster configuration for the parameterized state backend variant.
     *
     * <p>Side effects: may start a ZooKeeper testing server (HA variant) and creates temp folders
     * for checkpoints and the changelog storage. The order matters: the ZK server must exist
     * before {@code createClusterConfig()} reads the HA settings.
     *
     * @return the fully populated cluster {@link Configuration}
     * @throws Exception if temp folders or the ZooKeeper server cannot be created
     */
    private Configuration getConfiguration() throws Exception {
        // print a message when starting a test method to avoid Travis' <tt>"Maven produced no
        // output for xxx seconds."</tt> messages
        System.out.println(
                "Starting " + getClass().getCanonicalName() + "#" + name.getMethodName() + ".");

        // Testing HA Scenario / ZKCompletedCheckpointStore with incremental checkpoints
        StateBackendEnum stateBackendEnum = getStateBackend();
        if (ROCKSDB_INCREMENTAL_ZK.equals(stateBackendEnum)) {
            zkServer = ZooKeeperTestUtils.createAndStartZookeeperTestingServer();
        }

        Configuration config = createClusterConfig();

        // Each branch configures backend + checkpoint storage for one enum constant;
        // see StateBackendEnum for the intended pairing.
        switch (stateBackendEnum) {
            case MEM:
                config.set(StateBackendOptions.STATE_BACKEND, "hashmap");
                config.set(CheckpointingOptions.CHECKPOINT_STORAGE, "jobmanager");
                break;
            case FILE:
                {
                    final File backups = tempFolder.newFolder().getAbsoluteFile();
                    config.set(StateBackendOptions.STATE_BACKEND, "hashmap");
                    config.set(
                            CheckpointingOptions.CHECKPOINTS_DIRECTORY,
                            Path.fromLocalFile(backups).toUri().toString());
                    break;
                }
            case ROCKSDB_FULL:
                {
                    // -1: no managed-memory override; full (non-incremental) snapshots.
                    setupRocksDB(config, -1, false);
                    break;
                }
            case ROCKSDB_INCREMENTAL:
                // Test RocksDB based timer service as well
                config.set(
                        RocksDBOptions.TIMER_SERVICE_FACTORY,
                        EmbeddedRocksDBStateBackend.PriorityQueueStateType.ROCKSDB);
                setupRocksDB(config, 16, true);
                break;
            case ROCKSDB_INCREMENTAL_ZK:
                {
                    setupRocksDB(config, 16, true);
                    break;
                }
            case FORST_INCREMENTAL:
                {
                    config.set(
                            ForStOptions.TIMER_SERVICE_FACTORY,
                            ForStStateBackend.PriorityQueueStateType.ForStDB);
                    setupForSt(config, 16);
                    break;
                }
            default:
                throw new IllegalStateException("No backend selected.");
        }
        // Configure DFS DSTL for this test as it might produce too much GC pressure if
        // ChangelogStateBackend is used.
        // Doing it on cluster level unconditionally as randomization currently happens on the job
        // level (environment); while this factory can only be set on the cluster level.
        FsStateChangelogStorageFactory.configure(
                config, tempFolder.newFolder(), Duration.ofMinutes(1), 10);
        return config;
    }
private void setupRocksDB(
Configuration config, int fileSizeThreshold, boolean incrementalCheckpoints)
throws IOException {
// Configure the managed memory size as 64MB per slot for rocksDB state backend.
config.set(
TaskManagerOptions.MANAGED_MEMORY_SIZE,
MemorySize.ofMebiBytes(PARALLELISM / NUM_OF_TASK_MANAGERS * 64));
final String rocksDb = tempFolder.newFolder().getAbsolutePath();
final File backups = tempFolder.newFolder().getAbsoluteFile();
// we use the fs backend with small threshold here to test the behaviour with file
// references, not self contained byte handles
config.set(StateBackendOptions.STATE_BACKEND, "rocksdb");
config.set(CheckpointingOptions.INCREMENTAL_CHECKPOINTS, incrementalCheckpoints);
config.set(
CheckpointingOptions.CHECKPOINTS_DIRECTORY,
Path.fromLocalFile(backups).toUri().toString());
if (fileSizeThreshold != -1) {
config.set(
CheckpointingOptions.FS_SMALL_FILE_THRESHOLD,
MemorySize.parse(fileSizeThreshold + "b"));
}
config.set(RocksDBOptions.LOCAL_DIRECTORIES, rocksDb);
}
private void setupForSt(Configuration config, int fileSizeThreshold) throws IOException {
// Configure the managed memory size as 64MB per slot for rocksDB state backend.
config.set(
TaskManagerOptions.MANAGED_MEMORY_SIZE,
MemorySize.ofMebiBytes(PARALLELISM / NUM_OF_TASK_MANAGERS * 64));
final String forstdb = tempFolder.newFolder().getAbsolutePath();
final File backups = tempFolder.newFolder().getAbsoluteFile();
// we use the fs backend with small threshold here to test the behaviour with file
// references, not self contained byte handles
config.set(StateBackendOptions.STATE_BACKEND, "forst");
config.set(CheckpointingOptions.INCREMENTAL_CHECKPOINTS, true);
config.set(
CheckpointingOptions.CHECKPOINTS_DIRECTORY,
Path.fromLocalFile(backups).toUri().toString());
if (fileSizeThreshold != -1) {
config.set(
CheckpointingOptions.FS_SMALL_FILE_THRESHOLD,
MemorySize.parse(fileSizeThreshold + "b"));
}
config.set(ForStOptions.LOCAL_DIRECTORIES, forstdb);
}
protected Configuration createClusterConfig() throws IOException {
TemporaryFolder temporaryFolder = new TemporaryFolder();
temporaryFolder.create();
final File haDir = temporaryFolder.newFolder();
Configuration config = new Configuration();
config.set(RpcOptions.FRAMESIZE, String.valueOf(MAX_MEM_STATE_SIZE) + "b");
if (zkServer != null) {
config.set(HighAvailabilityOptions.HA_MODE, "ZOOKEEPER");
config.set(HighAvailabilityOptions.HA_ZOOKEEPER_QUORUM, zkServer.getConnectString());
config.set(HighAvailabilityOptions.HA_STORAGE_PATH, haDir.toURI().toString());
}
return config;
}
@Before
public void setupTestCluster() throws Exception {
configuration = getConfigurationSafe();
miniClusterResource = getMiniClusterResource();
miniClusterResource.before();
}
@After
public void stopTestCluster() throws IOException {
if (miniClusterResource != null) {
miniClusterResource.after();
miniClusterResource = null;
}
if (zkServer != null) {
zkServer.close();
zkServer = null;
}
// Prints a message when finishing a test method to avoid Travis' <tt>"Maven produced no
// output
// for xxx seconds."</tt> messages.
System.out.println(
"Finished " + getClass().getCanonicalName() + "#" + name.getMethodName() + ".");
}
// ------------------------------------------------------------------------
@Test
public void testTumblingTimeWindow() {
final int numElementsPerKey = numElementsPerKey();
final int windowSize = windowSize();
final int numKeys = numKeys();
try {
StreamExecutionEnvironment env =
StreamExecutionEnvironment.getExecutionEnvironment(configuration);
env.setParallelism(PARALLELISM);
env.enableCheckpointing(100);
RestartStrategyUtils.configureFixedDelayRestartStrategy(env, 1, 0L);
env.getConfig().setUseSnapshotCompression(true);
env.addSource(
new FailingSource(
new KeyedEventTimeGenerator(numKeys, windowSize),
numElementsPerKey))
.rebalance()
.keyBy(x -> x.f0)
.window(TumblingEventTimeWindows.of(Duration.ofMillis(windowSize)))
.apply(
new RichWindowFunction<
Tuple2<Long, IntType>,
Tuple4<Long, Long, Long, IntType>,
Long,
TimeWindow>() {
private boolean open = false;
@Override
public void open(OpenContext openContext) {
assertEquals(
PARALLELISM,
getRuntimeContext()
.getTaskInfo()
.getNumberOfParallelSubtasks());
open = true;
}
@Override
public void apply(
Long l,
TimeWindow window,
Iterable<Tuple2<Long, IntType>> values,
Collector<Tuple4<Long, Long, Long, IntType>> out) {
// validate that the function has been opened properly
assertTrue(open);
int sum = 0;
long key = -1;
for (Tuple2<Long, IntType> value : values) {
sum += value.f1.value;
key = value.f0;
}
final Tuple4<Long, Long, Long, IntType> result =
new Tuple4<>(
key,
window.getStart(),
window.getEnd(),
new IntType(sum));
out.collect(result);
}
})
.addSink(
new ValidatingSink<>(
new SinkValidatorUpdateFun(numElementsPerKey),
new SinkValidatorCheckFun(
numKeys, numElementsPerKey, windowSize)))
.setParallelism(1);
env.execute("Tumbling Window Test");
} catch (Exception e) {
e.printStackTrace();
fail(e.getMessage());
}
}
@Test
public void testTumblingTimeWindowWithKVStateMinMaxParallelism() {
doTestTumblingTimeWindowWithKVState(PARALLELISM);
}
@Test
public void testTumblingTimeWindowWithKVStateMaxMaxParallelism() {
doTestTumblingTimeWindowWithKVState(1 << 15);
}
public void doTestTumblingTimeWindowWithKVState(int maxParallelism) {
final int numElementsPerKey = numElementsPerKey();
final int windowSize = windowSize();
final int numKeys = numKeys();
try {
StreamExecutionEnvironment env =
StreamExecutionEnvironment.getExecutionEnvironment(configuration);
env.setParallelism(PARALLELISM);
env.setMaxParallelism(maxParallelism);
env.enableCheckpointing(100);
RestartStrategyUtils.configureFixedDelayRestartStrategy(env, 1, 0L);
env.getConfig().setUseSnapshotCompression(true);
env.addSource(
new FailingSource(
new KeyedEventTimeGenerator(numKeys, windowSize),
numElementsPerKey))
.rebalance()
.keyBy(x -> x.f0)
.window(TumblingEventTimeWindows.of(Duration.ofMillis(windowSize)))
.apply(
new RichWindowFunction<
Tuple2<Long, IntType>,
Tuple4<Long, Long, Long, IntType>,
Long,
TimeWindow>() {
private boolean open = false;
private ValueState<Integer> count;
@Override
public void open(OpenContext openContext) {
assertEquals(
PARALLELISM,
getRuntimeContext()
.getTaskInfo()
.getNumberOfParallelSubtasks());
open = true;
count =
getRuntimeContext()
.getState(
new ValueStateDescriptor<>(
"count", Integer.class, 0));
}
@Override
public void apply(
Long l,
TimeWindow window,
Iterable<Tuple2<Long, IntType>> values,
Collector<Tuple4<Long, Long, Long, IntType>> out)
throws Exception {
// the window count state starts with the key, so that we get
// different count results for each key
if (count.value() == 0) {
count.update(l.intValue());
}
// validate that the function has been opened properly
assertTrue(open);
count.update(count.value() + 1);
out.collect(
new Tuple4<>(
l,
window.getStart(),
window.getEnd(),
new IntType(count.value())));
}
})
.addSink(
new ValidatingSink<>(
new CountingSinkValidatorUpdateFun(),
new SinkValidatorCheckFun(
numKeys, numElementsPerKey, windowSize)))
.setParallelism(1);
env.execute("Tumbling Window Test");
} catch (Exception e) {
e.printStackTrace();
fail(e.getMessage());
}
}
@Test
public void testSlidingTimeWindow() {
final int numElementsPerKey = numElementsPerKey();
final int windowSize = windowSize();
final int windowSlide = windowSlide();
final int numKeys = numKeys();
try {
StreamExecutionEnvironment env =
StreamExecutionEnvironment.getExecutionEnvironment(configuration);
env.setMaxParallelism(2 * PARALLELISM);
env.setParallelism(PARALLELISM);
env.enableCheckpointing(100);
RestartStrategyUtils.configureFixedDelayRestartStrategy(env, 1, 0L);
env.getConfig().setUseSnapshotCompression(true);
env.addSource(
new FailingSource(
new KeyedEventTimeGenerator(numKeys, windowSlide),
numElementsPerKey))
.rebalance()
.keyBy(x -> x.f0)
.window(
SlidingEventTimeWindows.of(
Duration.ofMillis(windowSize), Duration.ofMillis(windowSlide)))
.apply(
new RichWindowFunction<
Tuple2<Long, IntType>,
Tuple4<Long, Long, Long, IntType>,
Long,
TimeWindow>() {
private boolean open = false;
@Override
public void open(OpenContext openContext) {
assertEquals(
PARALLELISM,
getRuntimeContext()
.getTaskInfo()
.getNumberOfParallelSubtasks());
open = true;
}
@Override
public void apply(
Long l,
TimeWindow window,
Iterable<Tuple2<Long, IntType>> values,
Collector<Tuple4<Long, Long, Long, IntType>> out) {
// validate that the function has been opened properly
assertTrue(open);
int sum = 0;
long key = -1;
for (Tuple2<Long, IntType> value : values) {
sum += value.f1.value;
key = value.f0;
}
final Tuple4<Long, Long, Long, IntType> output =
new Tuple4<>(
key,
window.getStart(),
window.getEnd(),
new IntType(sum));
out.collect(output);
}
})
.addSink(
new ValidatingSink<>(
new SinkValidatorUpdateFun(numElementsPerKey),
new SinkValidatorCheckFun(
numKeys, numElementsPerKey, windowSlide)))
.setParallelism(1);
env.execute("Tumbling Window Test");
} catch (Exception e) {
e.printStackTrace();
fail(e.getMessage());
}
}
@Test
public void testPreAggregatedTumblingTimeWindow() {
final int numElementsPerKey = numElementsPerKey();
final int windowSize = windowSize();
final int numKeys = numKeys();
try {
StreamExecutionEnvironment env =
StreamExecutionEnvironment.getExecutionEnvironment(configuration);
env.setParallelism(PARALLELISM);
env.enableCheckpointing(100);
RestartStrategyUtils.configureFixedDelayRestartStrategy(env, 1, 0L);
env.getConfig().setUseSnapshotCompression(true);
env.addSource(
new FailingSource(
new KeyedEventTimeGenerator(numKeys, windowSize),
numElementsPerKey))
.rebalance()
.keyBy(x -> x.f0)
.window(TumblingEventTimeWindows.of(Duration.ofMillis(windowSize)))
.reduce(
new ReduceFunction<Tuple2<Long, IntType>>() {
@Override
public Tuple2<Long, IntType> reduce(
Tuple2<Long, IntType> a, Tuple2<Long, IntType> b) {
return new Tuple2<>(a.f0, new IntType(a.f1.value + b.f1.value));
}
},
new RichWindowFunction<
Tuple2<Long, IntType>,
Tuple4<Long, Long, Long, IntType>,
Long,
TimeWindow>() {
private boolean open = false;
@Override
public void open(OpenContext openContext) {
assertEquals(
PARALLELISM,
getRuntimeContext()
.getTaskInfo()
.getNumberOfParallelSubtasks());
open = true;
}
@Override
public void apply(
Long l,
TimeWindow window,
Iterable<Tuple2<Long, IntType>> input,
Collector<Tuple4<Long, Long, Long, IntType>> out) {
// validate that the function has been opened properly
assertTrue(open);
for (Tuple2<Long, IntType> in : input) {
final Tuple4<Long, Long, Long, IntType> output =
new Tuple4<>(
in.f0,
window.getStart(),
window.getEnd(),
in.f1);
out.collect(output);
}
}
})
.addSink(
new ValidatingSink<>(
new SinkValidatorUpdateFun(numElementsPerKey),
new SinkValidatorCheckFun(
numKeys, numElementsPerKey, windowSize)))
.setParallelism(1);
env.execute("Tumbling Window Test");
} catch (Exception e) {
e.printStackTrace();
fail(e.getMessage());
}
}
@Test
public void testPreAggregatedSlidingTimeWindow() {
final int numElementsPerKey = numElementsPerKey();
final int windowSize = windowSize();
final int windowSlide = windowSlide();
final int numKeys = numKeys();
try {
StreamExecutionEnvironment env =
StreamExecutionEnvironment.getExecutionEnvironment(configuration);
env.setParallelism(PARALLELISM);
env.enableCheckpointing(100);
RestartStrategyUtils.configureFixedDelayRestartStrategy(env, 1, 0L);
env.getConfig().setUseSnapshotCompression(true);
env.addSource(
new FailingSource(
new KeyedEventTimeGenerator(numKeys, windowSlide),
numElementsPerKey))
.rebalance()
.keyBy(x -> x.f0)
.window(
SlidingEventTimeWindows.of(
Duration.ofMillis(windowSize), Duration.ofMillis(windowSlide)))
.reduce(
new ReduceFunction<Tuple2<Long, IntType>>() {
@Override
public Tuple2<Long, IntType> reduce(
Tuple2<Long, IntType> a, Tuple2<Long, IntType> b) {
// validate that the function has been opened properly
return new Tuple2<>(a.f0, new IntType(a.f1.value + b.f1.value));
}
},
new RichWindowFunction<
Tuple2<Long, IntType>,
Tuple4<Long, Long, Long, IntType>,
Long,
TimeWindow>() {
private boolean open = false;
@Override
public void open(OpenContext openContext) {
assertEquals(
PARALLELISM,
getRuntimeContext()
.getTaskInfo()
.getNumberOfParallelSubtasks());
open = true;
}
@Override
public void apply(
Long l,
TimeWindow window,
Iterable<Tuple2<Long, IntType>> input,
Collector<Tuple4<Long, Long, Long, IntType>> out) {
// validate that the function has been opened properly
assertTrue(open);
for (Tuple2<Long, IntType> in : input) {
out.collect(
new Tuple4<>(
in.f0,
window.getStart(),
window.getEnd(),
in.f1));
}
}
})
.addSink(
new ValidatingSink<>(
new SinkValidatorUpdateFun(numElementsPerKey),
new SinkValidatorCheckFun(
numKeys, numElementsPerKey, windowSlide)))
.setParallelism(1);
env.execute("Tumbling Window Test");
} catch (Exception e) {
e.printStackTrace();
fail(e.getMessage());
}
}
// ------------------------------------------------------------------------
// Utilities
// ------------------------------------------------------------------------
/** For validating the stateful window counts. */
static class CountingSinkValidatorUpdateFun
implements ValidatingSink.CountUpdater<Tuple4<Long, Long, Long, IntType>> {
@Override
public void updateCount(
Tuple4<Long, Long, Long, IntType> value, Map<Long, Integer> windowCounts) {
windowCounts.merge(value.f0, 1, (a, b) -> a + b);
// verify the contents of that window, the contents should be:
// (key + num windows so far)
assertEquals(
"Window counts don't match for key " + value.f0 + ".",
value.f0.intValue() + windowCounts.get(value.f0),
value.f3.value);
}
}
// ------------------------------------
static class SinkValidatorUpdateFun
implements ValidatingSink.CountUpdater<Tuple4<Long, Long, Long, IntType>> {
private final int elementsPerKey;
SinkValidatorUpdateFun(int elementsPerKey) {
this.elementsPerKey = elementsPerKey;
}
@Override
public void updateCount(
Tuple4<Long, Long, Long, IntType> value, Map<Long, Integer> windowCounts) {
// verify the contents of that window, Tuple4.f1 and .f2 are the window start/end
// the sum should be "sum (start .. end-1)"
int expectedSum = 0;
// we shorten the range if it goes beyond elementsPerKey, because those are "incomplete"
// sliding windows
long countUntil = Math.min(value.f2, elementsPerKey);
for (long i = value.f1; i < countUntil; i++) {
// only sum up positive vals, to filter out the negative start of the
// first sliding windows
if (i > 0) {
expectedSum += i;
}
}
assertEquals(
"Window start: " + value.f1 + " end: " + value.f2, expectedSum, value.f3.value);
windowCounts.merge(value.f0, 1, (val, increment) -> val + increment);
}
}
static class SinkValidatorCheckFun implements ValidatingSink.ResultChecker {
private final int numKeys;
private final int numWindowsExpected;
SinkValidatorCheckFun(int numKeys, int elementsPerKey, int elementsPerWindow) {
this.numKeys = numKeys;
this.numWindowsExpected = elementsPerKey / elementsPerWindow;
}
@Override
public boolean checkResult(Map<Long, Integer> windowCounts) {
if (windowCounts.size() == numKeys) {
for (Integer windowCount : windowCounts.values()) {
if (windowCount < numWindowsExpected) {
return false;
}
}
return true;
}
return false;
}
}
static class KeyedEventTimeGenerator implements FailingSource.EventEmittingGenerator {
private final int keyUniverseSize;
private final int watermarkTrailing;
public KeyedEventTimeGenerator(int keyUniverseSize, int numElementsPerWindow) {
this.keyUniverseSize = keyUniverseSize;
// we let the watermark a bit behind, so that there can be in-flight timers that
// required checkpointing
// to include correct timer snapshots in our testing.
this.watermarkTrailing = 4 * numElementsPerWindow / 3;
}
@Override
public void emitEvent(
SourceFunction.SourceContext<Tuple2<Long, IntType>> ctx, int eventSequenceNo) {
final IntType intTypeNext = new IntType(eventSequenceNo);
for (long i = 0; i < keyUniverseSize; i++) {
final Tuple2<Long, IntType> generatedEvent = new Tuple2<>(i, intTypeNext);
ctx.collectWithTimestamp(generatedEvent, eventSequenceNo);
}
ctx.emitWatermark(new Watermark(eventSequenceNo - watermarkTrailing));
}
}
private int numElementsPerKey() {
return 3000;
}
private int windowSize() {
return 1000;
}
private int windowSlide() {
return 100;
}
private int numKeys() {
return 100;
}
}
// ==== googleapis/google-cloud-java : java-discoveryengine/proto-google-cloud-discoveryengine-v1alpha/src/main/java/com/google/cloud/discoveryengine/v1alpha/ImportSuggestionDenyListEntriesResponse.java ====
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/discoveryengine/v1alpha/import_config.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.discoveryengine.v1alpha;
/**
*
*
* <pre>
* Response message for
* [CompletionService.ImportSuggestionDenyListEntries][google.cloud.discoveryengine.v1alpha.CompletionService.ImportSuggestionDenyListEntries]
* method.
* </pre>
*
* Protobuf type {@code
* google.cloud.discoveryengine.v1alpha.ImportSuggestionDenyListEntriesResponse}
*/
public final class ImportSuggestionDenyListEntriesResponse
extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.discoveryengine.v1alpha.ImportSuggestionDenyListEntriesResponse)
ImportSuggestionDenyListEntriesResponseOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use ImportSuggestionDenyListEntriesResponse.newBuilder() to construct.
  // Builder-based constructor used by the generated Builder only.
  private ImportSuggestionDenyListEntriesResponse(
      com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // Default-instance constructor; initializes repeated fields to empty.
  private ImportSuggestionDenyListEntriesResponse() {
    errorSamples_ = java.util.Collections.emptyList();
  }
  // Called by the protobuf runtime to create fresh instances reflectively.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ImportSuggestionDenyListEntriesResponse();
  }
  // Returns the message descriptor declared in the generated ImportConfigProto holder.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.discoveryengine.v1alpha.ImportConfigProto
        .internal_static_google_cloud_discoveryengine_v1alpha_ImportSuggestionDenyListEntriesResponse_descriptor;
  }
  // Binds the reflective field accessor table to this message and its Builder.
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.discoveryengine.v1alpha.ImportConfigProto
        .internal_static_google_cloud_discoveryengine_v1alpha_ImportSuggestionDenyListEntriesResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.discoveryengine.v1alpha.ImportSuggestionDenyListEntriesResponse.class,
            com.google.cloud.discoveryengine.v1alpha.ImportSuggestionDenyListEntriesResponse.Builder
                .class);
  }
  public static final int ERROR_SAMPLES_FIELD_NUMBER = 1;

  // Backing list; unmodifiable once the message has been built.
  @SuppressWarnings("serial")
  private java.util.List<com.google.rpc.Status> errorSamples_;

  /**
   *
   *
   * <pre>
   * A sample of errors encountered while processing the request.
   * </pre>
   *
   * <code>repeated .google.rpc.Status error_samples = 1;</code>
   */
  @java.lang.Override
  public java.util.List<com.google.rpc.Status> getErrorSamplesList() {
    return errorSamples_;
  }
  /**
   *
   *
   * <pre>
   * A sample of errors encountered while processing the request.
   * </pre>
   *
   * <code>repeated .google.rpc.Status error_samples = 1;</code>
   */
  // OrBuilder view over the same backing list.
  @java.lang.Override
  public java.util.List<? extends com.google.rpc.StatusOrBuilder> getErrorSamplesOrBuilderList() {
    return errorSamples_;
  }
  /**
   *
   *
   * <pre>
   * A sample of errors encountered while processing the request.
   * </pre>
   *
   * <code>repeated .google.rpc.Status error_samples = 1;</code>
   */
  // Number of error samples in this response.
  @java.lang.Override
  public int getErrorSamplesCount() {
    return errorSamples_.size();
  }
  /**
   *
   *
   * <pre>
   * A sample of errors encountered while processing the request.
   * </pre>
   *
   * <code>repeated .google.rpc.Status error_samples = 1;</code>
   */
  // Indexed accessor for a single error sample.
  @java.lang.Override
  public com.google.rpc.Status getErrorSamples(int index) {
    return errorSamples_.get(index);
  }
  /**
   *
   *
   * <pre>
   * A sample of errors encountered while processing the request.
   * </pre>
   *
   * <code>repeated .google.rpc.Status error_samples = 1;</code>
   */
  // Indexed OrBuilder accessor for a single error sample.
  @java.lang.Override
  public com.google.rpc.StatusOrBuilder getErrorSamplesOrBuilder(int index) {
    return errorSamples_.get(index);
  }
  public static final int IMPORTED_ENTRIES_COUNT_FIELD_NUMBER = 2;

  // Proto3 scalar; 0 means unset on the wire.
  private long importedEntriesCount_ = 0L;

  /**
   *
   *
   * <pre>
   * Count of deny list entries successfully imported.
   * </pre>
   *
   * <code>int64 imported_entries_count = 2;</code>
   *
   * @return The importedEntriesCount.
   */
  @java.lang.Override
  public long getImportedEntriesCount() {
    return importedEntriesCount_;
  }
  public static final int FAILED_ENTRIES_COUNT_FIELD_NUMBER = 3;

  // Proto3 scalar; 0 means unset on the wire.
  private long failedEntriesCount_ = 0L;

  /**
   *
   *
   * <pre>
   * Count of deny list entries that failed to be imported.
   * </pre>
   *
   * <code>int64 failed_entries_count = 3;</code>
   *
   * @return The failedEntriesCount.
   */
  @java.lang.Override
  public long getFailedEntriesCount() {
    return failedEntriesCount_;
  }
  // Memoized result: -1 = not computed, 0 = not initialized, 1 = initialized.
  private byte memoizedIsInitialized = -1;

  // No required fields in this message, so initialization always succeeds.
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes set fields in field-number order; proto3 default values are skipped.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    for (int i = 0; i < errorSamples_.size(); i++) {
      output.writeMessage(1, errorSamples_.get(i));
    }
    if (importedEntriesCount_ != 0L) {
      output.writeInt64(2, importedEntriesCount_);
    }
    if (failedEntriesCount_ != 0L) {
      output.writeInt64(3, failedEntriesCount_);
    }
    getUnknownFields().writeTo(output);
  }
  // Computes and memoizes the serialized byte size, mirroring writeTo's field logic.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    for (int i = 0; i < errorSamples_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, errorSamples_.get(i));
    }
    if (importedEntriesCount_ != 0L) {
      size += com.google.protobuf.CodedOutputStream.computeInt64Size(2, importedEntriesCount_);
    }
    if (failedEntriesCount_ != 0L) {
      size += com.google.protobuf.CodedOutputStream.computeInt64Size(3, failedEntriesCount_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Field-by-field equality, including unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj
        instanceof
        com.google.cloud.discoveryengine.v1alpha.ImportSuggestionDenyListEntriesResponse)) {
      return super.equals(obj);
    }
    com.google.cloud.discoveryengine.v1alpha.ImportSuggestionDenyListEntriesResponse other =
        (com.google.cloud.discoveryengine.v1alpha.ImportSuggestionDenyListEntriesResponse) obj;

    if (!getErrorSamplesList().equals(other.getErrorSamplesList())) return false;
    if (getImportedEntriesCount() != other.getImportedEntriesCount()) return false;
    if (getFailedEntriesCount() != other.getFailedEntriesCount()) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Memoized hash consistent with equals; folds in field numbers and values.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getErrorSamplesCount() > 0) {
      hash = (37 * hash) + ERROR_SAMPLES_FIELD_NUMBER;
      hash = (53 * hash) + getErrorSamplesList().hashCode();
    }
    hash = (37 * hash) + IMPORTED_ENTRIES_COUNT_FIELD_NUMBER;
    hash = (53 * hash) + com.google.protobuf.Internal.hashLong(getImportedEntriesCount());
    hash = (37 * hash) + FAILED_ENTRIES_COUNT_FIELD_NUMBER;
    hash = (53 * hash) + com.google.protobuf.Internal.hashLong(getFailedEntriesCount());
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Parses from a ByteBuffer using the singleton PARSER.
  public static com.google.cloud.discoveryengine.v1alpha.ImportSuggestionDenyListEntriesResponse
      parseFrom(java.nio.ByteBuffer data)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  // Parses from a ByteBuffer with an extension registry.
  public static com.google.cloud.discoveryengine.v1alpha.ImportSuggestionDenyListEntriesResponse
      parseFrom(
          java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  // Parses from a ByteString.
  public static com.google.cloud.discoveryengine.v1alpha.ImportSuggestionDenyListEntriesResponse
      parseFrom(com.google.protobuf.ByteString data)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  // Parses from a ByteString with an extension registry.
  public static com.google.cloud.discoveryengine.v1alpha.ImportSuggestionDenyListEntriesResponse
      parseFrom(
          com.google.protobuf.ByteString data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  // Parses from a byte array.
  public static com.google.cloud.discoveryengine.v1alpha.ImportSuggestionDenyListEntriesResponse
      parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  // Parses from a byte array with an extension registry.
  public static com.google.cloud.discoveryengine.v1alpha.ImportSuggestionDenyListEntriesResponse
      parseFrom(byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  // Parses a single message from an InputStream (I/O errors wrapped by the runtime helper).
  public static com.google.cloud.discoveryengine.v1alpha.ImportSuggestionDenyListEntriesResponse
      parseFrom(java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  // Parses from an InputStream with an extension registry.
  public static com.google.cloud.discoveryengine.v1alpha.ImportSuggestionDenyListEntriesResponse
      parseFrom(
          java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Parses a length-delimited message from an InputStream.
  public static com.google.cloud.discoveryengine.v1alpha.ImportSuggestionDenyListEntriesResponse
      parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  // Parses a length-delimited message with an extension registry.
  public static com.google.cloud.discoveryengine.v1alpha.ImportSuggestionDenyListEntriesResponse
      parseDelimitedFrom(
          java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Parses from a CodedInputStream.
  public static com.google.cloud.discoveryengine.v1alpha.ImportSuggestionDenyListEntriesResponse
      parseFrom(com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  // Parses from a CodedInputStream with an extension registry.
  public static com.google.cloud.discoveryengine.v1alpha.ImportSuggestionDenyListEntriesResponse
      parseFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Returns a new builder for this message type.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  // Static builder factory starting from the default instance.
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  // Static builder factory pre-populated from an existing message.
  public static Builder newBuilder(
      com.google.cloud.discoveryengine.v1alpha.ImportSuggestionDenyListEntriesResponse prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  // Builder carrying this message's current field values (empty for the default instance).
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  // Creates a builder attached to a parent for nested-builder change notification.
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Response message for
* [CompletionService.ImportSuggestionDenyListEntries][google.cloud.discoveryengine.v1alpha.CompletionService.ImportSuggestionDenyListEntries]
* method.
* </pre>
*
* Protobuf type {@code
* google.cloud.discoveryengine.v1alpha.ImportSuggestionDenyListEntriesResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.discoveryengine.v1alpha.ImportSuggestionDenyListEntriesResponse)
com.google.cloud.discoveryengine.v1alpha.ImportSuggestionDenyListEntriesResponseOrBuilder {
    // Returns the message descriptor shared with the outer message type.
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.discoveryengine.v1alpha.ImportConfigProto
          .internal_static_google_cloud_discoveryengine_v1alpha_ImportSuggestionDenyListEntriesResponse_descriptor;
    }
    // Binds the reflective field accessor table for the Builder.
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.discoveryengine.v1alpha.ImportConfigProto
          .internal_static_google_cloud_discoveryengine_v1alpha_ImportSuggestionDenyListEntriesResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.discoveryengine.v1alpha.ImportSuggestionDenyListEntriesResponse
                  .class,
              com.google.cloud.discoveryengine.v1alpha.ImportSuggestionDenyListEntriesResponse
                  .Builder.class);
    }
    // Construct using
    // com.google.cloud.discoveryengine.v1alpha.ImportSuggestionDenyListEntriesResponse.newBuilder();
    // not intended to be instantiated directly.
    private Builder() {}
    // Parent-aware constructor used for nested builders.
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    // Resets every field (and the has-bits) to its default state.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (errorSamplesBuilder_ == null) {
        errorSamples_ = java.util.Collections.emptyList();
      } else {
        errorSamples_ = null;
        errorSamplesBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      importedEntriesCount_ = 0L;
      failedEntriesCount_ = 0L;
      return this;
    }
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.discoveryengine.v1alpha.ImportConfigProto
.internal_static_google_cloud_discoveryengine_v1alpha_ImportSuggestionDenyListEntriesResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.discoveryengine.v1alpha.ImportSuggestionDenyListEntriesResponse
getDefaultInstanceForType() {
return com.google.cloud.discoveryengine.v1alpha.ImportSuggestionDenyListEntriesResponse
.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.discoveryengine.v1alpha.ImportSuggestionDenyListEntriesResponse
build() {
com.google.cloud.discoveryengine.v1alpha.ImportSuggestionDenyListEntriesResponse result =
buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.discoveryengine.v1alpha.ImportSuggestionDenyListEntriesResponse
buildPartial() {
com.google.cloud.discoveryengine.v1alpha.ImportSuggestionDenyListEntriesResponse result =
new com.google.cloud.discoveryengine.v1alpha.ImportSuggestionDenyListEntriesResponse(
this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.cloud.discoveryengine.v1alpha.ImportSuggestionDenyListEntriesResponse result) {
if (errorSamplesBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
errorSamples_ = java.util.Collections.unmodifiableList(errorSamples_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.errorSamples_ = errorSamples_;
} else {
result.errorSamples_ = errorSamplesBuilder_.build();
}
}
private void buildPartial0(
com.google.cloud.discoveryengine.v1alpha.ImportSuggestionDenyListEntriesResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.importedEntriesCount_ = importedEntriesCount_;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.failedEntriesCount_ = failedEntriesCount_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other
instanceof
com.google.cloud.discoveryengine.v1alpha.ImportSuggestionDenyListEntriesResponse) {
return mergeFrom(
(com.google.cloud.discoveryengine.v1alpha.ImportSuggestionDenyListEntriesResponse)
other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(
com.google.cloud.discoveryengine.v1alpha.ImportSuggestionDenyListEntriesResponse other) {
if (other
== com.google.cloud.discoveryengine.v1alpha.ImportSuggestionDenyListEntriesResponse
.getDefaultInstance()) return this;
if (errorSamplesBuilder_ == null) {
if (!other.errorSamples_.isEmpty()) {
if (errorSamples_.isEmpty()) {
errorSamples_ = other.errorSamples_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureErrorSamplesIsMutable();
errorSamples_.addAll(other.errorSamples_);
}
onChanged();
}
} else {
if (!other.errorSamples_.isEmpty()) {
if (errorSamplesBuilder_.isEmpty()) {
errorSamplesBuilder_.dispose();
errorSamplesBuilder_ = null;
errorSamples_ = other.errorSamples_;
bitField0_ = (bitField0_ & ~0x00000001);
errorSamplesBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getErrorSamplesFieldBuilder()
: null;
} else {
errorSamplesBuilder_.addAllMessages(other.errorSamples_);
}
}
}
if (other.getImportedEntriesCount() != 0L) {
setImportedEntriesCount(other.getImportedEntriesCount());
}
if (other.getFailedEntriesCount() != 0L) {
setFailedEntriesCount(other.getFailedEntriesCount());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.rpc.Status m =
input.readMessage(com.google.rpc.Status.parser(), extensionRegistry);
if (errorSamplesBuilder_ == null) {
ensureErrorSamplesIsMutable();
errorSamples_.add(m);
} else {
errorSamplesBuilder_.addMessage(m);
}
break;
} // case 10
case 16:
{
importedEntriesCount_ = input.readInt64();
bitField0_ |= 0x00000002;
break;
} // case 16
case 24:
{
failedEntriesCount_ = input.readInt64();
bitField0_ |= 0x00000004;
break;
} // case 24
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.rpc.Status> errorSamples_ = java.util.Collections.emptyList();
private void ensureErrorSamplesIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
errorSamples_ = new java.util.ArrayList<com.google.rpc.Status>(errorSamples_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.rpc.Status, com.google.rpc.Status.Builder, com.google.rpc.StatusOrBuilder>
errorSamplesBuilder_;
/**
*
*
* <pre>
* A sample of errors encountered while processing the request.
* </pre>
*
* <code>repeated .google.rpc.Status error_samples = 1;</code>
*/
public java.util.List<com.google.rpc.Status> getErrorSamplesList() {
if (errorSamplesBuilder_ == null) {
return java.util.Collections.unmodifiableList(errorSamples_);
} else {
return errorSamplesBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* A sample of errors encountered while processing the request.
* </pre>
*
* <code>repeated .google.rpc.Status error_samples = 1;</code>
*/
public int getErrorSamplesCount() {
if (errorSamplesBuilder_ == null) {
return errorSamples_.size();
} else {
return errorSamplesBuilder_.getCount();
}
}
/**
*
*
* <pre>
* A sample of errors encountered while processing the request.
* </pre>
*
* <code>repeated .google.rpc.Status error_samples = 1;</code>
*/
public com.google.rpc.Status getErrorSamples(int index) {
if (errorSamplesBuilder_ == null) {
return errorSamples_.get(index);
} else {
return errorSamplesBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* A sample of errors encountered while processing the request.
* </pre>
*
* <code>repeated .google.rpc.Status error_samples = 1;</code>
*/
public Builder setErrorSamples(int index, com.google.rpc.Status value) {
if (errorSamplesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureErrorSamplesIsMutable();
errorSamples_.set(index, value);
onChanged();
} else {
errorSamplesBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* A sample of errors encountered while processing the request.
* </pre>
*
* <code>repeated .google.rpc.Status error_samples = 1;</code>
*/
public Builder setErrorSamples(int index, com.google.rpc.Status.Builder builderForValue) {
if (errorSamplesBuilder_ == null) {
ensureErrorSamplesIsMutable();
errorSamples_.set(index, builderForValue.build());
onChanged();
} else {
errorSamplesBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* A sample of errors encountered while processing the request.
* </pre>
*
* <code>repeated .google.rpc.Status error_samples = 1;</code>
*/
public Builder addErrorSamples(com.google.rpc.Status value) {
if (errorSamplesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureErrorSamplesIsMutable();
errorSamples_.add(value);
onChanged();
} else {
errorSamplesBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* A sample of errors encountered while processing the request.
* </pre>
*
* <code>repeated .google.rpc.Status error_samples = 1;</code>
*/
public Builder addErrorSamples(int index, com.google.rpc.Status value) {
if (errorSamplesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureErrorSamplesIsMutable();
errorSamples_.add(index, value);
onChanged();
} else {
errorSamplesBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* A sample of errors encountered while processing the request.
* </pre>
*
* <code>repeated .google.rpc.Status error_samples = 1;</code>
*/
public Builder addErrorSamples(com.google.rpc.Status.Builder builderForValue) {
if (errorSamplesBuilder_ == null) {
ensureErrorSamplesIsMutable();
errorSamples_.add(builderForValue.build());
onChanged();
} else {
errorSamplesBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* A sample of errors encountered while processing the request.
* </pre>
*
* <code>repeated .google.rpc.Status error_samples = 1;</code>
*/
public Builder addErrorSamples(int index, com.google.rpc.Status.Builder builderForValue) {
if (errorSamplesBuilder_ == null) {
ensureErrorSamplesIsMutable();
errorSamples_.add(index, builderForValue.build());
onChanged();
} else {
errorSamplesBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* A sample of errors encountered while processing the request.
* </pre>
*
* <code>repeated .google.rpc.Status error_samples = 1;</code>
*/
public Builder addAllErrorSamples(java.lang.Iterable<? extends com.google.rpc.Status> values) {
if (errorSamplesBuilder_ == null) {
ensureErrorSamplesIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, errorSamples_);
onChanged();
} else {
errorSamplesBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* A sample of errors encountered while processing the request.
* </pre>
*
* <code>repeated .google.rpc.Status error_samples = 1;</code>
*/
public Builder clearErrorSamples() {
if (errorSamplesBuilder_ == null) {
errorSamples_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
errorSamplesBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* A sample of errors encountered while processing the request.
* </pre>
*
* <code>repeated .google.rpc.Status error_samples = 1;</code>
*/
public Builder removeErrorSamples(int index) {
if (errorSamplesBuilder_ == null) {
ensureErrorSamplesIsMutable();
errorSamples_.remove(index);
onChanged();
} else {
errorSamplesBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* A sample of errors encountered while processing the request.
* </pre>
*
* <code>repeated .google.rpc.Status error_samples = 1;</code>
*/
public com.google.rpc.Status.Builder getErrorSamplesBuilder(int index) {
return getErrorSamplesFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* A sample of errors encountered while processing the request.
* </pre>
*
* <code>repeated .google.rpc.Status error_samples = 1;</code>
*/
public com.google.rpc.StatusOrBuilder getErrorSamplesOrBuilder(int index) {
if (errorSamplesBuilder_ == null) {
return errorSamples_.get(index);
} else {
return errorSamplesBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* A sample of errors encountered while processing the request.
* </pre>
*
* <code>repeated .google.rpc.Status error_samples = 1;</code>
*/
public java.util.List<? extends com.google.rpc.StatusOrBuilder> getErrorSamplesOrBuilderList() {
if (errorSamplesBuilder_ != null) {
return errorSamplesBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(errorSamples_);
}
}
/**
*
*
* <pre>
* A sample of errors encountered while processing the request.
* </pre>
*
* <code>repeated .google.rpc.Status error_samples = 1;</code>
*/
public com.google.rpc.Status.Builder addErrorSamplesBuilder() {
return getErrorSamplesFieldBuilder().addBuilder(com.google.rpc.Status.getDefaultInstance());
}
/**
*
*
* <pre>
* A sample of errors encountered while processing the request.
* </pre>
*
* <code>repeated .google.rpc.Status error_samples = 1;</code>
*/
public com.google.rpc.Status.Builder addErrorSamplesBuilder(int index) {
return getErrorSamplesFieldBuilder()
.addBuilder(index, com.google.rpc.Status.getDefaultInstance());
}
/**
*
*
* <pre>
* A sample of errors encountered while processing the request.
* </pre>
*
* <code>repeated .google.rpc.Status error_samples = 1;</code>
*/
public java.util.List<com.google.rpc.Status.Builder> getErrorSamplesBuilderList() {
return getErrorSamplesFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.rpc.Status, com.google.rpc.Status.Builder, com.google.rpc.StatusOrBuilder>
getErrorSamplesFieldBuilder() {
if (errorSamplesBuilder_ == null) {
errorSamplesBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.rpc.Status,
com.google.rpc.Status.Builder,
com.google.rpc.StatusOrBuilder>(
errorSamples_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
errorSamples_ = null;
}
return errorSamplesBuilder_;
}
private long importedEntriesCount_;
/**
*
*
* <pre>
* Count of deny list entries successfully imported.
* </pre>
*
* <code>int64 imported_entries_count = 2;</code>
*
* @return The importedEntriesCount.
*/
@java.lang.Override
public long getImportedEntriesCount() {
return importedEntriesCount_;
}
/**
*
*
* <pre>
* Count of deny list entries successfully imported.
* </pre>
*
* <code>int64 imported_entries_count = 2;</code>
*
* @param value The importedEntriesCount to set.
* @return This builder for chaining.
*/
public Builder setImportedEntriesCount(long value) {
importedEntriesCount_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Count of deny list entries successfully imported.
* </pre>
*
* <code>int64 imported_entries_count = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearImportedEntriesCount() {
bitField0_ = (bitField0_ & ~0x00000002);
importedEntriesCount_ = 0L;
onChanged();
return this;
}
private long failedEntriesCount_;
/**
*
*
* <pre>
* Count of deny list entries that failed to be imported.
* </pre>
*
* <code>int64 failed_entries_count = 3;</code>
*
* @return The failedEntriesCount.
*/
@java.lang.Override
public long getFailedEntriesCount() {
return failedEntriesCount_;
}
/**
*
*
* <pre>
* Count of deny list entries that failed to be imported.
* </pre>
*
* <code>int64 failed_entries_count = 3;</code>
*
* @param value The failedEntriesCount to set.
* @return This builder for chaining.
*/
public Builder setFailedEntriesCount(long value) {
failedEntriesCount_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Count of deny list entries that failed to be imported.
* </pre>
*
* <code>int64 failed_entries_count = 3;</code>
*
* @return This builder for chaining.
*/
public Builder clearFailedEntriesCount() {
bitField0_ = (bitField0_ & ~0x00000004);
failedEntriesCount_ = 0L;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.discoveryengine.v1alpha.ImportSuggestionDenyListEntriesResponse)
}
  // @@protoc_insertion_point(class_scope:google.cloud.discoveryengine.v1alpha.ImportSuggestionDenyListEntriesResponse)
  // Immutable singleton representing the default (all-fields-unset) message.
  private static final com.google.cloud.discoveryengine.v1alpha
          .ImportSuggestionDenyListEntriesResponse
      DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE =
        new com.google.cloud.discoveryengine.v1alpha.ImportSuggestionDenyListEntriesResponse();
  }
  public static com.google.cloud.discoveryengine.v1alpha.ImportSuggestionDenyListEntriesResponse
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Message parser; on failure, attaches the partially built message to the
  // thrown InvalidProtocolBufferException so callers can inspect what was read.
  private static final com.google.protobuf.Parser<ImportSuggestionDenyListEntriesResponse> PARSER =
      new com.google.protobuf.AbstractParser<ImportSuggestionDenyListEntriesResponse>() {
        @java.lang.Override
        public ImportSuggestionDenyListEntriesResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O failures so the parser contract only surfaces
            // InvalidProtocolBufferException.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<ImportSuggestionDenyListEntriesResponse> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<ImportSuggestionDenyListEntriesResponse> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.discoveryengine.v1alpha.ImportSuggestionDenyListEntriesResponse
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 37,719 | java-notebooks/proto-google-cloud-notebooks-v1beta1/src/main/java/com/google/cloud/notebooks/v1beta1/CreateEnvironmentRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/notebooks/v1beta1/service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.notebooks.v1beta1;
/**
*
*
* <pre>
* Request for creating a notebook environment.
* </pre>
*
* Protobuf type {@code google.cloud.notebooks.v1beta1.CreateEnvironmentRequest}
*/
public final class CreateEnvironmentRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.notebooks.v1beta1.CreateEnvironmentRequest)
CreateEnvironmentRequestOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use CreateEnvironmentRequest.newBuilder() to construct.
  private CreateEnvironmentRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // No-arg constructor initializes string fields to their proto3 default ("").
  private CreateEnvironmentRequest() {
    parent_ = "";
    environmentId_ = "";
  }
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new CreateEnvironmentRequest();
  }
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.notebooks.v1beta1.NotebooksProto
        .internal_static_google_cloud_notebooks_v1beta1_CreateEnvironmentRequest_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.notebooks.v1beta1.NotebooksProto
        .internal_static_google_cloud_notebooks_v1beta1_CreateEnvironmentRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.notebooks.v1beta1.CreateEnvironmentRequest.class,
            com.google.cloud.notebooks.v1beta1.CreateEnvironmentRequest.Builder.class);
  }
  // Presence bits for optional message fields (0x1 = environment_ is set).
  private int bitField0_;
  public static final int PARENT_FIELD_NUMBER = 1;
  // Holds either a String or a ByteString; decoded lazily and cached as String.
  @SuppressWarnings("serial")
  private volatile java.lang.Object parent_ = "";
  /**
   *
   *
   * <pre>
   * Required. Format: `projects/{project_id}/locations/{location}`
   * </pre>
   *
   * <code>string parent = 1 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The parent.
   */
  @java.lang.Override
  public java.lang.String getParent() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Decode from UTF-8 bytes once and cache the String back into the field.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      parent_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Required. Format: `projects/{project_id}/locations/{location}`
   * </pre>
   *
   * <code>string parent = 1 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The bytes for parent.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getParentBytes() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      // Encode once and cache the ByteString back into the field.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      parent_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int ENVIRONMENT_ID_FIELD_NUMBER = 2;
  // Same lazy String/ByteString caching scheme as parent_.
  @SuppressWarnings("serial")
  private volatile java.lang.Object environmentId_ = "";
  /**
   *
   *
   * <pre>
   * Required. User-defined unique ID of this environment. The `environment_id` must
   * be 1 to 63 characters long and contain only lowercase letters,
   * numeric characters, and dashes. The first character must be a lowercase
   * letter and the last character cannot be a dash.
   * </pre>
   *
   * <code>string environment_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The environmentId.
   */
  @java.lang.Override
  public java.lang.String getEnvironmentId() {
    java.lang.Object ref = environmentId_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      environmentId_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Required. User-defined unique ID of this environment. The `environment_id` must
   * be 1 to 63 characters long and contain only lowercase letters,
   * numeric characters, and dashes. The first character must be a lowercase
   * letter and the last character cannot be a dash.
   * </pre>
   *
   * <code>string environment_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The bytes for environmentId.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getEnvironmentIdBytes() {
    java.lang.Object ref = environmentId_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      environmentId_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int ENVIRONMENT_FIELD_NUMBER = 3;
  // Null until set; presence is tracked by bit 0x1 of bitField0_.
  private com.google.cloud.notebooks.v1beta1.Environment environment_;
  /**
   *
   *
   * <pre>
   * Required. The environment to be created.
   * </pre>
   *
   * <code>
   * .google.cloud.notebooks.v1beta1.Environment environment = 3 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return Whether the environment field is set.
   */
  @java.lang.Override
  public boolean hasEnvironment() {
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   *
   *
   * <pre>
   * Required. The environment to be created.
   * </pre>
   *
   * <code>
   * .google.cloud.notebooks.v1beta1.Environment environment = 3 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return The environment, or the default instance when unset (never null).
   */
  @java.lang.Override
  public com.google.cloud.notebooks.v1beta1.Environment getEnvironment() {
    return environment_ == null
        ? com.google.cloud.notebooks.v1beta1.Environment.getDefaultInstance()
        : environment_;
  }
  /**
   *
   *
   * <pre>
   * Required. The environment to be created.
   * </pre>
   *
   * <code>
   * .google.cloud.notebooks.v1beta1.Environment environment = 3 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.notebooks.v1beta1.EnvironmentOrBuilder getEnvironmentOrBuilder() {
    return environment_ == null
        ? com.google.cloud.notebooks.v1beta1.Environment.getDefaultInstance()
        : environment_;
  }
  // Memoized initialization check: -1 unknown, 0 false, 1 true.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // No proto2 required fields here, so the message is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Proto3 serialization: empty strings and unset messages are omitted.
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(environmentId_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, environmentId_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(3, getEnvironment());
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // Memoized: -1 means not yet computed. Must mirror writeTo() exactly.
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(environmentId_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, environmentId_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getEnvironment());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.notebooks.v1beta1.CreateEnvironmentRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.notebooks.v1beta1.CreateEnvironmentRequest other =
        (com.google.cloud.notebooks.v1beta1.CreateEnvironmentRequest) obj;
    // Field-by-field comparison; the optional message is compared only when
    // both sides have it set. Unknown fields also participate.
    if (!getParent().equals(other.getParent())) return false;
    if (!getEnvironmentId().equals(other.getEnvironmentId())) return false;
    if (hasEnvironment() != other.hasEnvironment()) return false;
    if (hasEnvironment()) {
      if (!getEnvironment().equals(other.getEnvironment())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    // Memoized; 0 means not yet computed. Consistent with equals(): the
    // optional message field contributes only when present.
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    hash = (37 * hash) + ENVIRONMENT_ID_FIELD_NUMBER;
    hash = (53 * hash) + getEnvironmentId().hashCode();
    if (hasEnvironment()) {
      hash = (37 * hash) + ENVIRONMENT_FIELD_NUMBER;
      hash = (53 * hash) + getEnvironment().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard protobuf static parse entry points; all delegate to PARSER via
  // GeneratedMessageV3 helpers, differing only in input type and whether an
  // ExtensionRegistry is supplied.
  public static com.google.cloud.notebooks.v1beta1.CreateEnvironmentRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.notebooks.v1beta1.CreateEnvironmentRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.notebooks.v1beta1.CreateEnvironmentRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.notebooks.v1beta1.CreateEnvironmentRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.notebooks.v1beta1.CreateEnvironmentRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.notebooks.v1beta1.CreateEnvironmentRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.notebooks.v1beta1.CreateEnvironmentRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.notebooks.v1beta1.CreateEnvironmentRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Delimited variants read a length-prefixed message from the stream.
  public static com.google.cloud.notebooks.v1beta1.CreateEnvironmentRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.notebooks.v1beta1.CreateEnvironmentRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.notebooks.v1beta1.CreateEnvironmentRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.notebooks.v1beta1.CreateEnvironmentRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  // Builder pre-populated with the given prototype's field values.
  public static Builder newBuilder(
      com.google.cloud.notebooks.v1beta1.CreateEnvironmentRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // Avoid a redundant mergeFrom when converting the default instance.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Request for creating a notebook environment.
* </pre>
*
* Protobuf type {@code google.cloud.notebooks.v1beta1.CreateEnvironmentRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.notebooks.v1beta1.CreateEnvironmentRequest)
com.google.cloud.notebooks.v1beta1.CreateEnvironmentRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.notebooks.v1beta1.NotebooksProto
.internal_static_google_cloud_notebooks_v1beta1_CreateEnvironmentRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.notebooks.v1beta1.NotebooksProto
.internal_static_google_cloud_notebooks_v1beta1_CreateEnvironmentRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.notebooks.v1beta1.CreateEnvironmentRequest.class,
com.google.cloud.notebooks.v1beta1.CreateEnvironmentRequest.Builder.class);
}
// Construct using com.google.cloud.notebooks.v1beta1.CreateEnvironmentRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getEnvironmentFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
parent_ = "";
environmentId_ = "";
environment_ = null;
if (environmentBuilder_ != null) {
environmentBuilder_.dispose();
environmentBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.notebooks.v1beta1.NotebooksProto
.internal_static_google_cloud_notebooks_v1beta1_CreateEnvironmentRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.notebooks.v1beta1.CreateEnvironmentRequest getDefaultInstanceForType() {
return com.google.cloud.notebooks.v1beta1.CreateEnvironmentRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.notebooks.v1beta1.CreateEnvironmentRequest build() {
com.google.cloud.notebooks.v1beta1.CreateEnvironmentRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.notebooks.v1beta1.CreateEnvironmentRequest buildPartial() {
com.google.cloud.notebooks.v1beta1.CreateEnvironmentRequest result =
new com.google.cloud.notebooks.v1beta1.CreateEnvironmentRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
    // Copies only the fields whose presence bits are set in the builder into the
    // result message, translating builder bits into the message's own bitField0_.
    private void buildPartial0(com.google.cloud.notebooks.v1beta1.CreateEnvironmentRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.parent_ = parent_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.environmentId_ = environmentId_;
      }
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000004) != 0)) {
        // Prefer the nested builder's built message when one exists; otherwise
        // use the directly-set environment_ field.
        result.environment_ =
            environmentBuilder_ == null ? environment_ : environmentBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      result.bitField0_ |= to_bitField0_;
    }
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.notebooks.v1beta1.CreateEnvironmentRequest) {
return mergeFrom((com.google.cloud.notebooks.v1beta1.CreateEnvironmentRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
    // Field-wise merge from another CreateEnvironmentRequest: non-empty string
    // fields overwrite, the environment message is merged recursively via
    // mergeEnvironment, and unknown fields are appended. Merging the default
    // instance is a no-op.
    public Builder mergeFrom(com.google.cloud.notebooks.v1beta1.CreateEnvironmentRequest other) {
      if (other == com.google.cloud.notebooks.v1beta1.CreateEnvironmentRequest.getDefaultInstance())
        return this;
      if (!other.getParent().isEmpty()) {
        parent_ = other.parent_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (!other.getEnvironmentId().isEmpty()) {
        environmentId_ = other.environmentId_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (other.hasEnvironment()) {
        mergeEnvironment(other.getEnvironment());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
@java.lang.Override
public final boolean isInitialized() {
return true;
}
    // Wire-format parse loop: reads tag/value pairs until EOF (tag 0) or an
    // end-group tag, dispatching on the tag to the matching field setter.
    // Tags 10/18/26 are fields 1/2/3 (parent, environment_id, environment),
    // each length-delimited (wire type 2).
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                parent_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                environmentId_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            case 26:
              {
                // Parse directly into the nested environment builder.
                input.readMessage(getEnvironmentFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            default:
              {
                // Unknown fields are preserved rather than dropped.
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        // Notify parent builders even on partial/failed parses.
        onChanged();
      } // finally
      return this;
    }
private int bitField0_;
private java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. Format: `projects/{project_id}/locations/{location}`
* </pre>
*
* <code>string parent = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The parent.
*/
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. Format: `projects/{project_id}/locations/{location}`
* </pre>
*
* <code>string parent = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for parent.
*/
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. Format: `projects/{project_id}/locations/{location}`
* </pre>
*
* <code>string parent = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The parent to set.
* @return This builder for chaining.
*/
public Builder setParent(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Format: `projects/{project_id}/locations/{location}`
* </pre>
*
* <code>string parent = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return This builder for chaining.
*/
public Builder clearParent() {
parent_ = getDefaultInstance().getParent();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Format: `projects/{project_id}/locations/{location}`
* </pre>
*
* <code>string parent = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The bytes for parent to set.
* @return This builder for chaining.
*/
public Builder setParentBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private java.lang.Object environmentId_ = "";
/**
*
*
* <pre>
* Required. User-defined unique ID of this environment. The `environment_id` must
* be 1 to 63 characters long and contain only lowercase letters,
* numeric characters, and dashes. The first character must be a lowercase
* letter and the last character cannot be a dash.
* </pre>
*
* <code>string environment_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The environmentId.
*/
public java.lang.String getEnvironmentId() {
java.lang.Object ref = environmentId_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
environmentId_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. User-defined unique ID of this environment. The `environment_id` must
* be 1 to 63 characters long and contain only lowercase letters,
* numeric characters, and dashes. The first character must be a lowercase
* letter and the last character cannot be a dash.
* </pre>
*
* <code>string environment_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for environmentId.
*/
public com.google.protobuf.ByteString getEnvironmentIdBytes() {
java.lang.Object ref = environmentId_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
environmentId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. User-defined unique ID of this environment. The `environment_id` must
* be 1 to 63 characters long and contain only lowercase letters,
* numeric characters, and dashes. The first character must be a lowercase
* letter and the last character cannot be a dash.
* </pre>
*
* <code>string environment_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The environmentId to set.
* @return This builder for chaining.
*/
public Builder setEnvironmentId(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
environmentId_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. User-defined unique ID of this environment. The `environment_id` must
* be 1 to 63 characters long and contain only lowercase letters,
* numeric characters, and dashes. The first character must be a lowercase
* letter and the last character cannot be a dash.
* </pre>
*
* <code>string environment_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return This builder for chaining.
*/
public Builder clearEnvironmentId() {
environmentId_ = getDefaultInstance().getEnvironmentId();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. User-defined unique ID of this environment. The `environment_id` must
* be 1 to 63 characters long and contain only lowercase letters,
* numeric characters, and dashes. The first character must be a lowercase
* letter and the last character cannot be a dash.
* </pre>
*
* <code>string environment_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The bytes for environmentId to set.
* @return This builder for chaining.
*/
public Builder setEnvironmentIdBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
environmentId_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private com.google.cloud.notebooks.v1beta1.Environment environment_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.notebooks.v1beta1.Environment,
com.google.cloud.notebooks.v1beta1.Environment.Builder,
com.google.cloud.notebooks.v1beta1.EnvironmentOrBuilder>
environmentBuilder_;
/**
*
*
* <pre>
* Required. The environment to be created.
* </pre>
*
* <code>
* .google.cloud.notebooks.v1beta1.Environment environment = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the environment field is set.
*/
public boolean hasEnvironment() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
*
*
* <pre>
* Required. The environment to be created.
* </pre>
*
* <code>
* .google.cloud.notebooks.v1beta1.Environment environment = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The environment.
*/
public com.google.cloud.notebooks.v1beta1.Environment getEnvironment() {
if (environmentBuilder_ == null) {
return environment_ == null
? com.google.cloud.notebooks.v1beta1.Environment.getDefaultInstance()
: environment_;
} else {
return environmentBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. The environment to be created.
* </pre>
*
* <code>
* .google.cloud.notebooks.v1beta1.Environment environment = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setEnvironment(com.google.cloud.notebooks.v1beta1.Environment value) {
if (environmentBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
environment_ = value;
} else {
environmentBuilder_.setMessage(value);
}
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The environment to be created.
* </pre>
*
* <code>
* .google.cloud.notebooks.v1beta1.Environment environment = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setEnvironment(
com.google.cloud.notebooks.v1beta1.Environment.Builder builderForValue) {
if (environmentBuilder_ == null) {
environment_ = builderForValue.build();
} else {
environmentBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The environment to be created.
* </pre>
*
* <code>
* .google.cloud.notebooks.v1beta1.Environment environment = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    // Merges the given Environment into the current value: if a non-default
    // environment is already present (presence bit set), field-wise merge via
    // the nested builder; otherwise simply adopt the new value. When a field
    // builder exists, delegate the merge to it.
    public Builder mergeEnvironment(com.google.cloud.notebooks.v1beta1.Environment value) {
      if (environmentBuilder_ == null) {
        if (((bitField0_ & 0x00000004) != 0)
            && environment_ != null
            && environment_
                != com.google.cloud.notebooks.v1beta1.Environment.getDefaultInstance()) {
          getEnvironmentBuilder().mergeFrom(value);
        } else {
          environment_ = value;
        }
      } else {
        environmentBuilder_.mergeFrom(value);
      }
      // Only mark presence/notify when a value actually ended up in environment_.
      if (environment_ != null) {
        bitField0_ |= 0x00000004;
        onChanged();
      }
      return this;
    }
/**
*
*
* <pre>
* Required. The environment to be created.
* </pre>
*
* <code>
* .google.cloud.notebooks.v1beta1.Environment environment = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearEnvironment() {
bitField0_ = (bitField0_ & ~0x00000004);
environment_ = null;
if (environmentBuilder_ != null) {
environmentBuilder_.dispose();
environmentBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The environment to be created.
* </pre>
*
* <code>
* .google.cloud.notebooks.v1beta1.Environment environment = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.notebooks.v1beta1.Environment.Builder getEnvironmentBuilder() {
bitField0_ |= 0x00000004;
onChanged();
return getEnvironmentFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. The environment to be created.
* </pre>
*
* <code>
* .google.cloud.notebooks.v1beta1.Environment environment = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.notebooks.v1beta1.EnvironmentOrBuilder getEnvironmentOrBuilder() {
if (environmentBuilder_ != null) {
return environmentBuilder_.getMessageOrBuilder();
} else {
return environment_ == null
? com.google.cloud.notebooks.v1beta1.Environment.getDefaultInstance()
: environment_;
}
}
/**
*
*
* <pre>
* Required. The environment to be created.
* </pre>
*
* <code>
* .google.cloud.notebooks.v1beta1.Environment environment = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    // Lazily creates the single-field builder for `environment`, seeding it
    // with the current value; ownership of the value moves to the builder, so
    // the raw environment_ reference is cleared afterwards.
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.notebooks.v1beta1.Environment,
            com.google.cloud.notebooks.v1beta1.Environment.Builder,
            com.google.cloud.notebooks.v1beta1.EnvironmentOrBuilder>
        getEnvironmentFieldBuilder() {
      if (environmentBuilder_ == null) {
        environmentBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.notebooks.v1beta1.Environment,
                com.google.cloud.notebooks.v1beta1.Environment.Builder,
                com.google.cloud.notebooks.v1beta1.EnvironmentOrBuilder>(
                getEnvironment(), getParentForChildren(), isClean());
        environment_ = null;
      }
      return environmentBuilder_;
    }
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.notebooks.v1beta1.CreateEnvironmentRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.notebooks.v1beta1.CreateEnvironmentRequest)
private static final com.google.cloud.notebooks.v1beta1.CreateEnvironmentRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.notebooks.v1beta1.CreateEnvironmentRequest();
}
public static com.google.cloud.notebooks.v1beta1.CreateEnvironmentRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
  // Shared parser instance. parsePartialFrom funnels every failure into
  // InvalidProtocolBufferException and attaches whatever was parsed so far as
  // the "unfinished message" for diagnostics.
  private static final com.google.protobuf.Parser<CreateEnvironmentRequest> PARSER =
      new com.google.protobuf.AbstractParser<CreateEnvironmentRequest>() {
        @java.lang.Override
        public CreateEnvironmentRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
public static com.google.protobuf.Parser<CreateEnvironmentRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<CreateEnvironmentRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.notebooks.v1beta1.CreateEnvironmentRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
// ---- file boundary (concatenation artifact): the following content is
// java-dataproc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/WorkflowTemplatePlacement.java ----
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/dataproc/v1/workflow_templates.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.dataproc.v1;
/**
*
*
* <pre>
* Specifies workflow execution target.
*
* Either `managed_cluster` or `cluster_selector` is required.
* </pre>
*
* Protobuf type {@code google.cloud.dataproc.v1.WorkflowTemplatePlacement}
*/
public final class WorkflowTemplatePlacement extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.dataproc.v1.WorkflowTemplatePlacement)
WorkflowTemplatePlacementOrBuilder {
private static final long serialVersionUID = 0L;
// Use WorkflowTemplatePlacement.newBuilder() to construct.
private WorkflowTemplatePlacement(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private WorkflowTemplatePlacement() {}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new WorkflowTemplatePlacement();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.dataproc.v1.WorkflowTemplatesProto
.internal_static_google_cloud_dataproc_v1_WorkflowTemplatePlacement_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.dataproc.v1.WorkflowTemplatesProto
.internal_static_google_cloud_dataproc_v1_WorkflowTemplatePlacement_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.dataproc.v1.WorkflowTemplatePlacement.class,
com.google.cloud.dataproc.v1.WorkflowTemplatePlacement.Builder.class);
}
private int placementCase_ = 0;
@SuppressWarnings("serial")
private java.lang.Object placement_;
  // Discriminator for the `placement` oneof; constants carry the proto field
  // numbers of their arms (0 = no arm set).
  public enum PlacementCase
      implements
          com.google.protobuf.Internal.EnumLite,
          com.google.protobuf.AbstractMessage.InternalOneOfEnum {
    MANAGED_CLUSTER(1),
    CLUSTER_SELECTOR(2),
    PLACEMENT_NOT_SET(0);
    private final int value;
    private PlacementCase(int value) {
      this.value = value;
    }
    /**
     * @param value The number of the enum to look for.
     * @return The enum associated with the given number.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static PlacementCase valueOf(int value) {
      return forNumber(value);
    }
    // Maps a field number to its case constant; returns null for any number
    // that is not an arm of this oneof.
    public static PlacementCase forNumber(int value) {
      switch (value) {
        case 1:
          return MANAGED_CLUSTER;
        case 2:
          return CLUSTER_SELECTOR;
        case 0:
          return PLACEMENT_NOT_SET;
        default:
          return null;
      }
    }
    public int getNumber() {
      return this.value;
    }
  };
public PlacementCase getPlacementCase() {
return PlacementCase.forNumber(placementCase_);
}
public static final int MANAGED_CLUSTER_FIELD_NUMBER = 1;
/**
*
*
* <pre>
* A cluster that is managed by the workflow.
* </pre>
*
* <code>.google.cloud.dataproc.v1.ManagedCluster managed_cluster = 1;</code>
*
* @return Whether the managedCluster field is set.
*/
@java.lang.Override
public boolean hasManagedCluster() {
return placementCase_ == 1;
}
/**
*
*
* <pre>
* A cluster that is managed by the workflow.
* </pre>
*
* <code>.google.cloud.dataproc.v1.ManagedCluster managed_cluster = 1;</code>
*
* @return The managedCluster.
*/
@java.lang.Override
public com.google.cloud.dataproc.v1.ManagedCluster getManagedCluster() {
if (placementCase_ == 1) {
return (com.google.cloud.dataproc.v1.ManagedCluster) placement_;
}
return com.google.cloud.dataproc.v1.ManagedCluster.getDefaultInstance();
}
/**
*
*
* <pre>
* A cluster that is managed by the workflow.
* </pre>
*
* <code>.google.cloud.dataproc.v1.ManagedCluster managed_cluster = 1;</code>
*/
@java.lang.Override
public com.google.cloud.dataproc.v1.ManagedClusterOrBuilder getManagedClusterOrBuilder() {
if (placementCase_ == 1) {
return (com.google.cloud.dataproc.v1.ManagedCluster) placement_;
}
return com.google.cloud.dataproc.v1.ManagedCluster.getDefaultInstance();
}
public static final int CLUSTER_SELECTOR_FIELD_NUMBER = 2;
/**
*
*
* <pre>
* Optional. A selector that chooses target cluster for jobs based
* on metadata.
*
* The selector is evaluated at the time each job is submitted.
* </pre>
*
* <code>.google.cloud.dataproc.v1.ClusterSelector cluster_selector = 2;</code>
*
* @return Whether the clusterSelector field is set.
*/
@java.lang.Override
public boolean hasClusterSelector() {
return placementCase_ == 2;
}
/**
*
*
* <pre>
* Optional. A selector that chooses target cluster for jobs based
* on metadata.
*
* The selector is evaluated at the time each job is submitted.
* </pre>
*
* <code>.google.cloud.dataproc.v1.ClusterSelector cluster_selector = 2;</code>
*
* @return The clusterSelector.
*/
@java.lang.Override
public com.google.cloud.dataproc.v1.ClusterSelector getClusterSelector() {
if (placementCase_ == 2) {
return (com.google.cloud.dataproc.v1.ClusterSelector) placement_;
}
return com.google.cloud.dataproc.v1.ClusterSelector.getDefaultInstance();
}
/**
*
*
* <pre>
* Optional. A selector that chooses target cluster for jobs based
* on metadata.
*
* The selector is evaluated at the time each job is submitted.
* </pre>
*
* <code>.google.cloud.dataproc.v1.ClusterSelector cluster_selector = 2;</code>
*/
@java.lang.Override
public com.google.cloud.dataproc.v1.ClusterSelectorOrBuilder getClusterSelectorOrBuilder() {
if (placementCase_ == 2) {
return (com.google.cloud.dataproc.v1.ClusterSelector) placement_;
}
return com.google.cloud.dataproc.v1.ClusterSelector.getDefaultInstance();
}
private byte memoizedIsInitialized = -1;
  // This message has no required fields, so it is always initialized; the
  // answer is memoized in a byte (-1 = unknown, 0 = false, 1 = true).
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes the active oneof arm (field 1 or 2), then any preserved unknown
  // fields, to the wire.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (placementCase_ == 1) {
      output.writeMessage(1, (com.google.cloud.dataproc.v1.ManagedCluster) placement_);
    }
    if (placementCase_ == 2) {
      output.writeMessage(2, (com.google.cloud.dataproc.v1.ClusterSelector) placement_);
    }
    getUnknownFields().writeTo(output);
  }
  // Computes (and memoizes in memoizedSize) the byte size that writeTo would
  // produce: the active oneof arm plus unknown fields.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (placementCase_ == 1) {
      size +=
          com.google.protobuf.CodedOutputStream.computeMessageSize(
              1, (com.google.cloud.dataproc.v1.ManagedCluster) placement_);
    }
    if (placementCase_ == 2) {
      size +=
          com.google.protobuf.CodedOutputStream.computeMessageSize(
              2, (com.google.cloud.dataproc.v1.ClusterSelector) placement_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Value equality: same oneof case, equal value in the active arm, and equal
  // unknown fields. Non-WorkflowTemplatePlacement objects fall back to
  // super.equals.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.dataproc.v1.WorkflowTemplatePlacement)) {
      return super.equals(obj);
    }
    com.google.cloud.dataproc.v1.WorkflowTemplatePlacement other =
        (com.google.cloud.dataproc.v1.WorkflowTemplatePlacement) obj;
    if (!getPlacementCase().equals(other.getPlacementCase())) return false;
    switch (placementCase_) {
      case 1:
        if (!getManagedCluster().equals(other.getManagedCluster())) return false;
        break;
      case 2:
        if (!getClusterSelector().equals(other.getClusterSelector())) return false;
        break;
      case 0:
      default:
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Hash consistent with equals(): mixes the descriptor, the active oneof
  // arm's field number and value, and unknown fields. Memoized (0 is treated
  // as "not yet computed").
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    switch (placementCase_) {
      case 1:
        hash = (37 * hash) + MANAGED_CLUSTER_FIELD_NUMBER;
        hash = (53 * hash) + getManagedCluster().hashCode();
        break;
      case 2:
        hash = (37 * hash) + CLUSTER_SELECTOR_FIELD_NUMBER;
        hash = (53 * hash) + getClusterSelector().hashCode();
        break;
      case 0:
      default:
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
public static com.google.cloud.dataproc.v1.WorkflowTemplatePlacement parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dataproc.v1.WorkflowTemplatePlacement parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dataproc.v1.WorkflowTemplatePlacement parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dataproc.v1.WorkflowTemplatePlacement parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dataproc.v1.WorkflowTemplatePlacement parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dataproc.v1.WorkflowTemplatePlacement parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dataproc.v1.WorkflowTemplatePlacement parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.dataproc.v1.WorkflowTemplatePlacement parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.dataproc.v1.WorkflowTemplatePlacement parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.dataproc.v1.WorkflowTemplatePlacement parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.dataproc.v1.WorkflowTemplatePlacement parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.dataproc.v1.WorkflowTemplatePlacement parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.dataproc.v1.WorkflowTemplatePlacement prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
 *
 *
 * <pre>
 * Specifies workflow execution target.
 *
 * Either `managed_cluster` or `cluster_selector` is required.
 * </pre>
 *
 * Protobuf type {@code google.cloud.dataproc.v1.WorkflowTemplatePlacement}
 */
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
    implements
    // @@protoc_insertion_point(builder_implements:google.cloud.dataproc.v1.WorkflowTemplatePlacement)
    com.google.cloud.dataproc.v1.WorkflowTemplatePlacementOrBuilder {
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.dataproc.v1.WorkflowTemplatesProto
        .internal_static_google_cloud_dataproc_v1_WorkflowTemplatePlacement_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.dataproc.v1.WorkflowTemplatesProto
        .internal_static_google_cloud_dataproc_v1_WorkflowTemplatePlacement_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.dataproc.v1.WorkflowTemplatePlacement.class,
            com.google.cloud.dataproc.v1.WorkflowTemplatePlacement.Builder.class);
  }

  // Construct using com.google.cloud.dataproc.v1.WorkflowTemplatePlacement.newBuilder()
  private Builder() {}

  private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    super(parent);
  }

  // Resets all fields, including the `placement` oneof, to their defaults.
  @java.lang.Override
  public Builder clear() {
    super.clear();
    bitField0_ = 0;
    if (managedClusterBuilder_ != null) {
      managedClusterBuilder_.clear();
    }
    if (clusterSelectorBuilder_ != null) {
      clusterSelectorBuilder_.clear();
    }
    placementCase_ = 0;
    placement_ = null;
    return this;
  }

  @java.lang.Override
  public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
    return com.google.cloud.dataproc.v1.WorkflowTemplatesProto
        .internal_static_google_cloud_dataproc_v1_WorkflowTemplatePlacement_descriptor;
  }

  @java.lang.Override
  public com.google.cloud.dataproc.v1.WorkflowTemplatePlacement getDefaultInstanceForType() {
    return com.google.cloud.dataproc.v1.WorkflowTemplatePlacement.getDefaultInstance();
  }

  // Builds and validates; throws if the result is not fully initialized.
  @java.lang.Override
  public com.google.cloud.dataproc.v1.WorkflowTemplatePlacement build() {
    com.google.cloud.dataproc.v1.WorkflowTemplatePlacement result = buildPartial();
    if (!result.isInitialized()) {
      throw newUninitializedMessageException(result);
    }
    return result;
  }

  // Builds without the initialization check; used while parsing partial data.
  @java.lang.Override
  public com.google.cloud.dataproc.v1.WorkflowTemplatePlacement buildPartial() {
    com.google.cloud.dataproc.v1.WorkflowTemplatePlacement result =
        new com.google.cloud.dataproc.v1.WorkflowTemplatePlacement(this);
    if (bitField0_ != 0) {
      buildPartial0(result);
    }
    buildPartialOneofs(result);
    onBuilt();
    return result;
  }

  // No non-oneof fields exist for this message; kept by the generator for uniformity.
  private void buildPartial0(com.google.cloud.dataproc.v1.WorkflowTemplatePlacement result) {
    int from_bitField0_ = bitField0_;
  }

  // Copies the active `placement` oneof case into the built message, preferring
  // the nested field builder's state over the raw stored value when present.
  private void buildPartialOneofs(com.google.cloud.dataproc.v1.WorkflowTemplatePlacement result) {
    result.placementCase_ = placementCase_;
    result.placement_ = this.placement_;
    if (placementCase_ == 1 && managedClusterBuilder_ != null) {
      result.placement_ = managedClusterBuilder_.build();
    }
    if (placementCase_ == 2 && clusterSelectorBuilder_ != null) {
      result.placement_ = clusterSelectorBuilder_.build();
    }
  }

  @java.lang.Override
  public Builder clone() {
    return super.clone();
  }

  @java.lang.Override
  public Builder setField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.setField(field, value);
  }

  @java.lang.Override
  public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
    return super.clearField(field);
  }

  @java.lang.Override
  public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
    return super.clearOneof(oneof);
  }

  @java.lang.Override
  public Builder setRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
    return super.setRepeatedField(field, index, value);
  }

  @java.lang.Override
  public Builder addRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.addRepeatedField(field, value);
  }

  // Dispatches to the typed overload when possible, else falls back to reflection.
  @java.lang.Override
  public Builder mergeFrom(com.google.protobuf.Message other) {
    if (other instanceof com.google.cloud.dataproc.v1.WorkflowTemplatePlacement) {
      return mergeFrom((com.google.cloud.dataproc.v1.WorkflowTemplatePlacement) other);
    } else {
      super.mergeFrom(other);
      return this;
    }
  }

  // Merges only the case set in `other`'s placement oneof; an unset oneof is a no-op.
  public Builder mergeFrom(com.google.cloud.dataproc.v1.WorkflowTemplatePlacement other) {
    if (other == com.google.cloud.dataproc.v1.WorkflowTemplatePlacement.getDefaultInstance())
      return this;
    switch (other.getPlacementCase()) {
      case MANAGED_CLUSTER:
        {
          mergeManagedCluster(other.getManagedCluster());
          break;
        }
      case CLUSTER_SELECTOR:
        {
          mergeClusterSelector(other.getClusterSelector());
          break;
        }
      case PLACEMENT_NOT_SET:
        {
          break;
        }
    }
    this.mergeUnknownFields(other.getUnknownFields());
    onChanged();
    return this;
  }

  @java.lang.Override
  public final boolean isInitialized() {
    return true;
  }

  // Wire-format parse loop: tag 10 = managed_cluster (field 1), tag 18 =
  // cluster_selector (field 2); unknown tags are preserved as unknown fields.
  @java.lang.Override
  public Builder mergeFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          case 10:
            {
              input.readMessage(getManagedClusterFieldBuilder().getBuilder(), extensionRegistry);
              placementCase_ = 1;
              break;
            } // case 10
          case 18:
            {
              input.readMessage(getClusterSelectorFieldBuilder().getBuilder(), extensionRegistry);
              placementCase_ = 2;
              break;
            } // case 18
          default:
            {
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
        } // switch (tag)
      } // while (!done)
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.unwrapIOException();
    } finally {
      onChanged();
    } // finally
    return this;
  }

  // Oneof state: 0 = not set, 1 = managed_cluster, 2 = cluster_selector.
  private int placementCase_ = 0;
  // Holds the currently-set oneof value when no field builder owns it.
  private java.lang.Object placement_;

  public PlacementCase getPlacementCase() {
    return PlacementCase.forNumber(placementCase_);
  }

  public Builder clearPlacement() {
    placementCase_ = 0;
    placement_ = null;
    onChanged();
    return this;
  }

  private int bitField0_;

  private com.google.protobuf.SingleFieldBuilderV3<
          com.google.cloud.dataproc.v1.ManagedCluster,
          com.google.cloud.dataproc.v1.ManagedCluster.Builder,
          com.google.cloud.dataproc.v1.ManagedClusterOrBuilder>
      managedClusterBuilder_;

  /**
   *
   *
   * <pre>
   * A cluster that is managed by the workflow.
   * </pre>
   *
   * <code>.google.cloud.dataproc.v1.ManagedCluster managed_cluster = 1;</code>
   *
   * @return Whether the managedCluster field is set.
   */
  @java.lang.Override
  public boolean hasManagedCluster() {
    return placementCase_ == 1;
  }

  /**
   *
   *
   * <pre>
   * A cluster that is managed by the workflow.
   * </pre>
   *
   * <code>.google.cloud.dataproc.v1.ManagedCluster managed_cluster = 1;</code>
   *
   * @return The managedCluster.
   */
  @java.lang.Override
  public com.google.cloud.dataproc.v1.ManagedCluster getManagedCluster() {
    if (managedClusterBuilder_ == null) {
      if (placementCase_ == 1) {
        return (com.google.cloud.dataproc.v1.ManagedCluster) placement_;
      }
      return com.google.cloud.dataproc.v1.ManagedCluster.getDefaultInstance();
    } else {
      if (placementCase_ == 1) {
        return managedClusterBuilder_.getMessage();
      }
      return com.google.cloud.dataproc.v1.ManagedCluster.getDefaultInstance();
    }
  }

  /**
   *
   *
   * <pre>
   * A cluster that is managed by the workflow.
   * </pre>
   *
   * <code>.google.cloud.dataproc.v1.ManagedCluster managed_cluster = 1;</code>
   */
  public Builder setManagedCluster(com.google.cloud.dataproc.v1.ManagedCluster value) {
    if (managedClusterBuilder_ == null) {
      if (value == null) {
        throw new NullPointerException();
      }
      placement_ = value;
      onChanged();
    } else {
      managedClusterBuilder_.setMessage(value);
    }
    placementCase_ = 1;
    return this;
  }

  /**
   *
   *
   * <pre>
   * A cluster that is managed by the workflow.
   * </pre>
   *
   * <code>.google.cloud.dataproc.v1.ManagedCluster managed_cluster = 1;</code>
   */
  public Builder setManagedCluster(
      com.google.cloud.dataproc.v1.ManagedCluster.Builder builderForValue) {
    if (managedClusterBuilder_ == null) {
      placement_ = builderForValue.build();
      onChanged();
    } else {
      managedClusterBuilder_.setMessage(builderForValue.build());
    }
    placementCase_ = 1;
    return this;
  }

  /**
   *
   *
   * <pre>
   * A cluster that is managed by the workflow.
   * </pre>
   *
   * <code>.google.cloud.dataproc.v1.ManagedCluster managed_cluster = 1;</code>
   */
  public Builder mergeManagedCluster(com.google.cloud.dataproc.v1.ManagedCluster value) {
    if (managedClusterBuilder_ == null) {
      // Merge into an existing non-default value; otherwise adopt `value` directly.
      if (placementCase_ == 1
          && placement_ != com.google.cloud.dataproc.v1.ManagedCluster.getDefaultInstance()) {
        placement_ =
            com.google.cloud.dataproc.v1.ManagedCluster.newBuilder(
                    (com.google.cloud.dataproc.v1.ManagedCluster) placement_)
                .mergeFrom(value)
                .buildPartial();
      } else {
        placement_ = value;
      }
      onChanged();
    } else {
      if (placementCase_ == 1) {
        managedClusterBuilder_.mergeFrom(value);
      } else {
        managedClusterBuilder_.setMessage(value);
      }
    }
    placementCase_ = 1;
    return this;
  }

  /**
   *
   *
   * <pre>
   * A cluster that is managed by the workflow.
   * </pre>
   *
   * <code>.google.cloud.dataproc.v1.ManagedCluster managed_cluster = 1;</code>
   */
  public Builder clearManagedCluster() {
    if (managedClusterBuilder_ == null) {
      if (placementCase_ == 1) {
        placementCase_ = 0;
        placement_ = null;
        onChanged();
      }
    } else {
      if (placementCase_ == 1) {
        placementCase_ = 0;
        placement_ = null;
      }
      managedClusterBuilder_.clear();
    }
    return this;
  }

  /**
   *
   *
   * <pre>
   * A cluster that is managed by the workflow.
   * </pre>
   *
   * <code>.google.cloud.dataproc.v1.ManagedCluster managed_cluster = 1;</code>
   */
  public com.google.cloud.dataproc.v1.ManagedCluster.Builder getManagedClusterBuilder() {
    return getManagedClusterFieldBuilder().getBuilder();
  }

  /**
   *
   *
   * <pre>
   * A cluster that is managed by the workflow.
   * </pre>
   *
   * <code>.google.cloud.dataproc.v1.ManagedCluster managed_cluster = 1;</code>
   */
  @java.lang.Override
  public com.google.cloud.dataproc.v1.ManagedClusterOrBuilder getManagedClusterOrBuilder() {
    if ((placementCase_ == 1) && (managedClusterBuilder_ != null)) {
      return managedClusterBuilder_.getMessageOrBuilder();
    } else {
      if (placementCase_ == 1) {
        return (com.google.cloud.dataproc.v1.ManagedCluster) placement_;
      }
      return com.google.cloud.dataproc.v1.ManagedCluster.getDefaultInstance();
    }
  }

  /**
   *
   *
   * <pre>
   * A cluster that is managed by the workflow.
   * </pre>
   *
   * <code>.google.cloud.dataproc.v1.ManagedCluster managed_cluster = 1;</code>
   */
  // Lazily creates the nested field builder, migrating any raw stored value into
  // it, and switches the oneof to the managed_cluster case as a side effect.
  private com.google.protobuf.SingleFieldBuilderV3<
          com.google.cloud.dataproc.v1.ManagedCluster,
          com.google.cloud.dataproc.v1.ManagedCluster.Builder,
          com.google.cloud.dataproc.v1.ManagedClusterOrBuilder>
      getManagedClusterFieldBuilder() {
    if (managedClusterBuilder_ == null) {
      if (!(placementCase_ == 1)) {
        placement_ = com.google.cloud.dataproc.v1.ManagedCluster.getDefaultInstance();
      }
      managedClusterBuilder_ =
          new com.google.protobuf.SingleFieldBuilderV3<
              com.google.cloud.dataproc.v1.ManagedCluster,
              com.google.cloud.dataproc.v1.ManagedCluster.Builder,
              com.google.cloud.dataproc.v1.ManagedClusterOrBuilder>(
              (com.google.cloud.dataproc.v1.ManagedCluster) placement_,
              getParentForChildren(),
              isClean());
      placement_ = null;
    }
    placementCase_ = 1;
    onChanged();
    return managedClusterBuilder_;
  }

  private com.google.protobuf.SingleFieldBuilderV3<
          com.google.cloud.dataproc.v1.ClusterSelector,
          com.google.cloud.dataproc.v1.ClusterSelector.Builder,
          com.google.cloud.dataproc.v1.ClusterSelectorOrBuilder>
      clusterSelectorBuilder_;

  /**
   *
   *
   * <pre>
   * Optional. A selector that chooses target cluster for jobs based
   * on metadata.
   *
   * The selector is evaluated at the time each job is submitted.
   * </pre>
   *
   * <code>.google.cloud.dataproc.v1.ClusterSelector cluster_selector = 2;</code>
   *
   * @return Whether the clusterSelector field is set.
   */
  @java.lang.Override
  public boolean hasClusterSelector() {
    return placementCase_ == 2;
  }

  /**
   *
   *
   * <pre>
   * Optional. A selector that chooses target cluster for jobs based
   * on metadata.
   *
   * The selector is evaluated at the time each job is submitted.
   * </pre>
   *
   * <code>.google.cloud.dataproc.v1.ClusterSelector cluster_selector = 2;</code>
   *
   * @return The clusterSelector.
   */
  @java.lang.Override
  public com.google.cloud.dataproc.v1.ClusterSelector getClusterSelector() {
    if (clusterSelectorBuilder_ == null) {
      if (placementCase_ == 2) {
        return (com.google.cloud.dataproc.v1.ClusterSelector) placement_;
      }
      return com.google.cloud.dataproc.v1.ClusterSelector.getDefaultInstance();
    } else {
      if (placementCase_ == 2) {
        return clusterSelectorBuilder_.getMessage();
      }
      return com.google.cloud.dataproc.v1.ClusterSelector.getDefaultInstance();
    }
  }

  /**
   *
   *
   * <pre>
   * Optional. A selector that chooses target cluster for jobs based
   * on metadata.
   *
   * The selector is evaluated at the time each job is submitted.
   * </pre>
   *
   * <code>.google.cloud.dataproc.v1.ClusterSelector cluster_selector = 2;</code>
   */
  public Builder setClusterSelector(com.google.cloud.dataproc.v1.ClusterSelector value) {
    if (clusterSelectorBuilder_ == null) {
      if (value == null) {
        throw new NullPointerException();
      }
      placement_ = value;
      onChanged();
    } else {
      clusterSelectorBuilder_.setMessage(value);
    }
    placementCase_ = 2;
    return this;
  }

  /**
   *
   *
   * <pre>
   * Optional. A selector that chooses target cluster for jobs based
   * on metadata.
   *
   * The selector is evaluated at the time each job is submitted.
   * </pre>
   *
   * <code>.google.cloud.dataproc.v1.ClusterSelector cluster_selector = 2;</code>
   */
  public Builder setClusterSelector(
      com.google.cloud.dataproc.v1.ClusterSelector.Builder builderForValue) {
    if (clusterSelectorBuilder_ == null) {
      placement_ = builderForValue.build();
      onChanged();
    } else {
      clusterSelectorBuilder_.setMessage(builderForValue.build());
    }
    placementCase_ = 2;
    return this;
  }

  /**
   *
   *
   * <pre>
   * Optional. A selector that chooses target cluster for jobs based
   * on metadata.
   *
   * The selector is evaluated at the time each job is submitted.
   * </pre>
   *
   * <code>.google.cloud.dataproc.v1.ClusterSelector cluster_selector = 2;</code>
   */
  public Builder mergeClusterSelector(com.google.cloud.dataproc.v1.ClusterSelector value) {
    if (clusterSelectorBuilder_ == null) {
      // Merge into an existing non-default value; otherwise adopt `value` directly.
      if (placementCase_ == 2
          && placement_ != com.google.cloud.dataproc.v1.ClusterSelector.getDefaultInstance()) {
        placement_ =
            com.google.cloud.dataproc.v1.ClusterSelector.newBuilder(
                    (com.google.cloud.dataproc.v1.ClusterSelector) placement_)
                .mergeFrom(value)
                .buildPartial();
      } else {
        placement_ = value;
      }
      onChanged();
    } else {
      if (placementCase_ == 2) {
        clusterSelectorBuilder_.mergeFrom(value);
      } else {
        clusterSelectorBuilder_.setMessage(value);
      }
    }
    placementCase_ = 2;
    return this;
  }

  /**
   *
   *
   * <pre>
   * Optional. A selector that chooses target cluster for jobs based
   * on metadata.
   *
   * The selector is evaluated at the time each job is submitted.
   * </pre>
   *
   * <code>.google.cloud.dataproc.v1.ClusterSelector cluster_selector = 2;</code>
   */
  public Builder clearClusterSelector() {
    if (clusterSelectorBuilder_ == null) {
      if (placementCase_ == 2) {
        placementCase_ = 0;
        placement_ = null;
        onChanged();
      }
    } else {
      if (placementCase_ == 2) {
        placementCase_ = 0;
        placement_ = null;
      }
      clusterSelectorBuilder_.clear();
    }
    return this;
  }

  /**
   *
   *
   * <pre>
   * Optional. A selector that chooses target cluster for jobs based
   * on metadata.
   *
   * The selector is evaluated at the time each job is submitted.
   * </pre>
   *
   * <code>.google.cloud.dataproc.v1.ClusterSelector cluster_selector = 2;</code>
   */
  public com.google.cloud.dataproc.v1.ClusterSelector.Builder getClusterSelectorBuilder() {
    return getClusterSelectorFieldBuilder().getBuilder();
  }

  /**
   *
   *
   * <pre>
   * Optional. A selector that chooses target cluster for jobs based
   * on metadata.
   *
   * The selector is evaluated at the time each job is submitted.
   * </pre>
   *
   * <code>.google.cloud.dataproc.v1.ClusterSelector cluster_selector = 2;</code>
   */
  @java.lang.Override
  public com.google.cloud.dataproc.v1.ClusterSelectorOrBuilder getClusterSelectorOrBuilder() {
    if ((placementCase_ == 2) && (clusterSelectorBuilder_ != null)) {
      return clusterSelectorBuilder_.getMessageOrBuilder();
    } else {
      if (placementCase_ == 2) {
        return (com.google.cloud.dataproc.v1.ClusterSelector) placement_;
      }
      return com.google.cloud.dataproc.v1.ClusterSelector.getDefaultInstance();
    }
  }

  /**
   *
   *
   * <pre>
   * Optional. A selector that chooses target cluster for jobs based
   * on metadata.
   *
   * The selector is evaluated at the time each job is submitted.
   * </pre>
   *
   * <code>.google.cloud.dataproc.v1.ClusterSelector cluster_selector = 2;</code>
   */
  // Lazily creates the nested field builder, migrating any raw stored value into
  // it, and switches the oneof to the cluster_selector case as a side effect.
  private com.google.protobuf.SingleFieldBuilderV3<
          com.google.cloud.dataproc.v1.ClusterSelector,
          com.google.cloud.dataproc.v1.ClusterSelector.Builder,
          com.google.cloud.dataproc.v1.ClusterSelectorOrBuilder>
      getClusterSelectorFieldBuilder() {
    if (clusterSelectorBuilder_ == null) {
      if (!(placementCase_ == 2)) {
        placement_ = com.google.cloud.dataproc.v1.ClusterSelector.getDefaultInstance();
      }
      clusterSelectorBuilder_ =
          new com.google.protobuf.SingleFieldBuilderV3<
              com.google.cloud.dataproc.v1.ClusterSelector,
              com.google.cloud.dataproc.v1.ClusterSelector.Builder,
              com.google.cloud.dataproc.v1.ClusterSelectorOrBuilder>(
              (com.google.cloud.dataproc.v1.ClusterSelector) placement_,
              getParentForChildren(),
              isClean());
      placement_ = null;
    }
    placementCase_ = 2;
    onChanged();
    return clusterSelectorBuilder_;
  }

  @java.lang.Override
  public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.setUnknownFields(unknownFields);
  }

  @java.lang.Override
  public final Builder mergeUnknownFields(
      final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.mergeUnknownFields(unknownFields);
  }

  // @@protoc_insertion_point(builder_scope:google.cloud.dataproc.v1.WorkflowTemplatePlacement)
}
// @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.WorkflowTemplatePlacement)
// Singleton default (all-fields-unset) instance shared by all callers.
private static final com.google.cloud.dataproc.v1.WorkflowTemplatePlacement DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.cloud.dataproc.v1.WorkflowTemplatePlacement();
}

public static com.google.cloud.dataproc.v1.WorkflowTemplatePlacement getDefaultInstance() {
  return DEFAULT_INSTANCE;
}

// Parser delegates to Builder.mergeFrom and attaches the partially-built
// message to any parse exception so callers can inspect what was read.
private static final com.google.protobuf.Parser<WorkflowTemplatePlacement> PARSER =
    new com.google.protobuf.AbstractParser<WorkflowTemplatePlacement>() {
      @java.lang.Override
      public WorkflowTemplatePlacement parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

public static com.google.protobuf.Parser<WorkflowTemplatePlacement> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<WorkflowTemplatePlacement> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.cloud.dataproc.v1.WorkflowTemplatePlacement getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/bigquery/biglake/v1/metastore.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.bigquery.biglake.v1;
/**
*
*
* <pre>
* Request message for the CreateDatabase method.
* </pre>
*
* Protobuf type {@code google.cloud.bigquery.biglake.v1.CreateDatabaseRequest}
*/
public final class CreateDatabaseRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.bigquery.biglake.v1.CreateDatabaseRequest)
CreateDatabaseRequestOrBuilder {
private static final long serialVersionUID = 0L;

// Use CreateDatabaseRequest.newBuilder() to construct.
private CreateDatabaseRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}

// Default constructor: string fields start as empty strings.
private CreateDatabaseRequest() {
  parent_ = "";
  databaseId_ = "";
}

@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  return new CreateDatabaseRequest();
}

public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.bigquery.biglake.v1.MetastoreProto
      .internal_static_google_cloud_bigquery_biglake_v1_CreateDatabaseRequest_descriptor;
}

@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.cloud.bigquery.biglake.v1.MetastoreProto
      .internal_static_google_cloud_bigquery_biglake_v1_CreateDatabaseRequest_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.bigquery.biglake.v1.CreateDatabaseRequest.class,
          com.google.cloud.bigquery.biglake.v1.CreateDatabaseRequest.Builder.class);
}
// Bit 0 tracks presence of the `database` message field.
private int bitField0_;

public static final int PARENT_FIELD_NUMBER = 1;

// Stored as String or ByteString; lazily converted and cached as String on read.
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";

/**
 *
 *
 * <pre>
 * Required. The parent resource where this database will be created.
 * Format:
 * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}
 * </pre>
 *
 * <code>
 * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return The parent.
 */
@java.lang.Override
public java.lang.String getParent() {
  java.lang.Object ref = parent_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    // Decode the ByteString form once and cache the String for later reads.
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    parent_ = s;
    return s;
  }
}

/**
 *
 *
 * <pre>
 * Required. The parent resource where this database will be created.
 * Format:
 * projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}
 * </pre>
 *
 * <code>
 * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return The bytes for parent.
 */
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
  java.lang.Object ref = parent_;
  if (ref instanceof java.lang.String) {
    // Encode once and cache the ByteString form.
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    parent_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
public static final int DATABASE_FIELD_NUMBER = 2;

// Null until set; presence is tracked via bitField0_ bit 0, not nullness alone.
private com.google.cloud.bigquery.biglake.v1.Database database_;

/**
 *
 *
 * <pre>
 * Required. The database to create.
 * The `name` field does not need to be provided.
 * </pre>
 *
 * <code>
 * .google.cloud.bigquery.biglake.v1.Database database = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 *
 * @return Whether the database field is set.
 */
@java.lang.Override
public boolean hasDatabase() {
  return ((bitField0_ & 0x00000001) != 0);
}

/**
 *
 *
 * <pre>
 * Required. The database to create.
 * The `name` field does not need to be provided.
 * </pre>
 *
 * <code>
 * .google.cloud.bigquery.biglake.v1.Database database = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 *
 * @return The database.
 */
@java.lang.Override
public com.google.cloud.bigquery.biglake.v1.Database getDatabase() {
  return database_ == null
      ? com.google.cloud.bigquery.biglake.v1.Database.getDefaultInstance()
      : database_;
}

/**
 *
 *
 * <pre>
 * Required. The database to create.
 * The `name` field does not need to be provided.
 * </pre>
 *
 * <code>
 * .google.cloud.bigquery.biglake.v1.Database database = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
@java.lang.Override
public com.google.cloud.bigquery.biglake.v1.DatabaseOrBuilder getDatabaseOrBuilder() {
  return database_ == null
      ? com.google.cloud.bigquery.biglake.v1.Database.getDefaultInstance()
      : database_;
}
public static final int DATABASE_ID_FIELD_NUMBER = 3;

// Stored as String or ByteString; lazily converted and cached as String on read.
@SuppressWarnings("serial")
private volatile java.lang.Object databaseId_ = "";

/**
 *
 *
 * <pre>
 * Required. The ID to use for the database, which will become the final
 * component of the database's resource name.
 * </pre>
 *
 * <code>string database_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
 *
 * @return The databaseId.
 */
@java.lang.Override
public java.lang.String getDatabaseId() {
  java.lang.Object ref = databaseId_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    // Decode the ByteString form once and cache the String for later reads.
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    databaseId_ = s;
    return s;
  }
}

/**
 *
 *
 * <pre>
 * Required. The ID to use for the database, which will become the final
 * component of the database's resource name.
 * </pre>
 *
 * <code>string database_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
 *
 * @return The bytes for databaseId.
 */
@java.lang.Override
public com.google.protobuf.ByteString getDatabaseIdBytes() {
  java.lang.Object ref = databaseId_;
  if (ref instanceof java.lang.String) {
    // Encode once and cache the ByteString form.
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    databaseId_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
// Memoized tri-state: -1 unknown, 0 not initialized, 1 initialized.
private byte memoizedIsInitialized = -1;

@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  // No proto2 required fields here, so always initialized; cache the answer.
  memoizedIsInitialized = 1;
  return true;
}
// Serializes set fields in field-number order: parent (1), database (2),
// database_id (3), then any preserved unknown fields.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
  }
  if (((bitField0_ & 0x00000001) != 0)) {
    output.writeMessage(2, getDatabase());
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(databaseId_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 3, databaseId_);
  }
  getUnknownFields().writeTo(output);
}

// Computes and memoizes the serialized byte size; must mirror writeTo exactly.
@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;

  size = 0;
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
  }
  if (((bitField0_ & 0x00000001) != 0)) {
    size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getDatabase());
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(databaseId_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, databaseId_);
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}
// Value equality over parent, database (presence + value), databaseId,
// and unknown fields.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.cloud.bigquery.biglake.v1.CreateDatabaseRequest)) {
    return super.equals(obj);
  }
  com.google.cloud.bigquery.biglake.v1.CreateDatabaseRequest other =
      (com.google.cloud.bigquery.biglake.v1.CreateDatabaseRequest) obj;

  if (!getParent().equals(other.getParent())) return false;
  if (hasDatabase() != other.hasDatabase()) return false;
  if (hasDatabase()) {
    if (!getDatabase().equals(other.getDatabase())) return false;
  }
  if (!getDatabaseId().equals(other.getDatabaseId())) return false;
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}

// Hash folds in field numbers and values; memoized since the message is immutable.
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  hash = (37 * hash) + PARENT_FIELD_NUMBER;
  hash = (53 * hash) + getParent().hashCode();
  if (hasDatabase()) {
    hash = (37 * hash) + DATABASE_FIELD_NUMBER;
    hash = (53 * hash) + getDatabase().hashCode();
  }
  hash = (37 * hash) + DATABASE_ID_FIELD_NUMBER;
  hash = (53 * hash) + getDatabaseId().hashCode();
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
// Static parse entry points over every supported input representation; all
// delegate to PARSER (byte sources) or the GeneratedMessageV3 stream helpers.
public static com.google.cloud.bigquery.biglake.v1.CreateDatabaseRequest parseFrom(
    java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.bigquery.biglake.v1.CreateDatabaseRequest parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.bigquery.biglake.v1.CreateDatabaseRequest parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.bigquery.biglake.v1.CreateDatabaseRequest parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.bigquery.biglake.v1.CreateDatabaseRequest parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.bigquery.biglake.v1.CreateDatabaseRequest parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.bigquery.biglake.v1.CreateDatabaseRequest parseFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.bigquery.biglake.v1.CreateDatabaseRequest parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}

// Delimited variants read a varint length prefix before the message bytes.
public static com.google.cloud.bigquery.biglake.v1.CreateDatabaseRequest parseDelimitedFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}

public static com.google.cloud.bigquery.biglake.v1.CreateDatabaseRequest parseDelimitedFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}

public static com.google.cloud.bigquery.biglake.v1.CreateDatabaseRequest parseFrom(
    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.bigquery.biglake.v1.CreateDatabaseRequest parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
  return newBuilder();
}

// Returns a fresh Builder seeded from the immutable default instance.
public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}

// Returns a Builder pre-populated with a copy of {@code prototype}'s fields.
public static Builder newBuilder(
    com.google.cloud.bigquery.biglake.v1.CreateDatabaseRequest prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}

@java.lang.Override
public Builder toBuilder() {
  // The default instance yields an empty Builder; any other message is merged in.
  return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}

@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
*
*
* <pre>
* Request message for the CreateDatabase method.
* </pre>
*
* Protobuf type {@code google.cloud.bigquery.biglake.v1.CreateDatabaseRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.bigquery.biglake.v1.CreateDatabaseRequest)
com.google.cloud.bigquery.biglake.v1.CreateDatabaseRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.bigquery.biglake.v1.MetastoreProto
      .internal_static_google_cloud_bigquery_biglake_v1_CreateDatabaseRequest_descriptor;
}

@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.cloud.bigquery.biglake.v1.MetastoreProto
      .internal_static_google_cloud_bigquery_biglake_v1_CreateDatabaseRequest_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.bigquery.biglake.v1.CreateDatabaseRequest.class,
          com.google.cloud.bigquery.biglake.v1.CreateDatabaseRequest.Builder.class);
}

// Construct using com.google.cloud.bigquery.biglake.v1.CreateDatabaseRequest.newBuilder()
private Builder() {
  maybeForceBuilderInitialization();
}

private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  super(parent);
  maybeForceBuilderInitialization();
}

// Eagerly creates nested field builders only when the runtime requires it
// (alwaysUseFieldBuilders), e.g. for builder parent/child change propagation.
private void maybeForceBuilderInitialization() {
  if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
    getDatabaseFieldBuilder();
  }
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
parent_ = "";
database_ = null;
if (databaseBuilder_ != null) {
databaseBuilder_.dispose();
databaseBuilder_ = null;
}
databaseId_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.bigquery.biglake.v1.MetastoreProto
.internal_static_google_cloud_bigquery_biglake_v1_CreateDatabaseRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.bigquery.biglake.v1.CreateDatabaseRequest getDefaultInstanceForType() {
return com.google.cloud.bigquery.biglake.v1.CreateDatabaseRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.bigquery.biglake.v1.CreateDatabaseRequest build() {
com.google.cloud.bigquery.biglake.v1.CreateDatabaseRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.bigquery.biglake.v1.CreateDatabaseRequest buildPartial() {
com.google.cloud.bigquery.biglake.v1.CreateDatabaseRequest result =
new com.google.cloud.bigquery.biglake.v1.CreateDatabaseRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.bigquery.biglake.v1.CreateDatabaseRequest result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.parent_ = parent_;
}
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.database_ = databaseBuilder_ == null ? database_ : databaseBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.databaseId_ = databaseId_;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.bigquery.biglake.v1.CreateDatabaseRequest) {
return mergeFrom((com.google.cloud.bigquery.biglake.v1.CreateDatabaseRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.bigquery.biglake.v1.CreateDatabaseRequest other) {
if (other == com.google.cloud.bigquery.biglake.v1.CreateDatabaseRequest.getDefaultInstance())
return this;
if (!other.getParent().isEmpty()) {
parent_ = other.parent_;
bitField0_ |= 0x00000001;
onChanged();
}
if (other.hasDatabase()) {
mergeDatabase(other.getDatabase());
}
if (!other.getDatabaseId().isEmpty()) {
databaseId_ = other.databaseId_;
bitField0_ |= 0x00000004;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
parent_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
input.readMessage(getDatabaseFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
case 26:
{
databaseId_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000004;
break;
} // case 26
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The parent resource where this database will be created.
* Format:
* projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The parent resource where this database will be created.
* Format:
* projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The parent resource where this database will be created.
* Format:
* projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The parent to set.
* @return This builder for chaining.
*/
public Builder setParent(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The parent resource where this database will be created.
* Format:
* projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearParent() {
parent_ = getDefaultInstance().getParent();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The parent resource where this database will be created.
* Format:
* projects/{project_id_or_number}/locations/{location_id}/catalogs/{catalog_id}
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for parent to set.
* @return This builder for chaining.
*/
public Builder setParentBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private com.google.cloud.bigquery.biglake.v1.Database database_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.bigquery.biglake.v1.Database,
com.google.cloud.bigquery.biglake.v1.Database.Builder,
com.google.cloud.bigquery.biglake.v1.DatabaseOrBuilder>
databaseBuilder_;
/**
*
*
* <pre>
* Required. The database to create.
* The `name` field does not need to be provided.
* </pre>
*
* <code>
* .google.cloud.bigquery.biglake.v1.Database database = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the database field is set.
*/
public boolean hasDatabase() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. The database to create.
* The `name` field does not need to be provided.
* </pre>
*
* <code>
* .google.cloud.bigquery.biglake.v1.Database database = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The database.
*/
public com.google.cloud.bigquery.biglake.v1.Database getDatabase() {
if (databaseBuilder_ == null) {
return database_ == null
? com.google.cloud.bigquery.biglake.v1.Database.getDefaultInstance()
: database_;
} else {
return databaseBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. The database to create.
* The `name` field does not need to be provided.
* </pre>
*
* <code>
* .google.cloud.bigquery.biglake.v1.Database database = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setDatabase(com.google.cloud.bigquery.biglake.v1.Database value) {
if (databaseBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
database_ = value;
} else {
databaseBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The database to create.
* The `name` field does not need to be provided.
* </pre>
*
* <code>
* .google.cloud.bigquery.biglake.v1.Database database = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setDatabase(
com.google.cloud.bigquery.biglake.v1.Database.Builder builderForValue) {
if (databaseBuilder_ == null) {
database_ = builderForValue.build();
} else {
databaseBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The database to create.
* The `name` field does not need to be provided.
* </pre>
*
* <code>
* .google.cloud.bigquery.biglake.v1.Database database = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeDatabase(com.google.cloud.bigquery.biglake.v1.Database value) {
if (databaseBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)
&& database_ != null
&& database_ != com.google.cloud.bigquery.biglake.v1.Database.getDefaultInstance()) {
getDatabaseBuilder().mergeFrom(value);
} else {
database_ = value;
}
} else {
databaseBuilder_.mergeFrom(value);
}
if (database_ != null) {
bitField0_ |= 0x00000002;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. The database to create.
* The `name` field does not need to be provided.
* </pre>
*
* <code>
* .google.cloud.bigquery.biglake.v1.Database database = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearDatabase() {
bitField0_ = (bitField0_ & ~0x00000002);
database_ = null;
if (databaseBuilder_ != null) {
databaseBuilder_.dispose();
databaseBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The database to create.
* The `name` field does not need to be provided.
* </pre>
*
* <code>
* .google.cloud.bigquery.biglake.v1.Database database = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.bigquery.biglake.v1.Database.Builder getDatabaseBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getDatabaseFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. The database to create.
* The `name` field does not need to be provided.
* </pre>
*
* <code>
* .google.cloud.bigquery.biglake.v1.Database database = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.bigquery.biglake.v1.DatabaseOrBuilder getDatabaseOrBuilder() {
if (databaseBuilder_ != null) {
return databaseBuilder_.getMessageOrBuilder();
} else {
return database_ == null
? com.google.cloud.bigquery.biglake.v1.Database.getDefaultInstance()
: database_;
}
}
/**
*
*
* <pre>
* Required. The database to create.
* The `name` field does not need to be provided.
* </pre>
*
* <code>
* .google.cloud.bigquery.biglake.v1.Database database = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.bigquery.biglake.v1.Database,
com.google.cloud.bigquery.biglake.v1.Database.Builder,
com.google.cloud.bigquery.biglake.v1.DatabaseOrBuilder>
getDatabaseFieldBuilder() {
if (databaseBuilder_ == null) {
databaseBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.bigquery.biglake.v1.Database,
com.google.cloud.bigquery.biglake.v1.Database.Builder,
com.google.cloud.bigquery.biglake.v1.DatabaseOrBuilder>(
getDatabase(), getParentForChildren(), isClean());
database_ = null;
}
return databaseBuilder_;
}
private java.lang.Object databaseId_ = "";
/**
*
*
* <pre>
* Required. The ID to use for the database, which will become the final
* component of the database's resource name.
* </pre>
*
* <code>string database_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The databaseId.
*/
public java.lang.String getDatabaseId() {
java.lang.Object ref = databaseId_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
databaseId_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The ID to use for the database, which will become the final
* component of the database's resource name.
* </pre>
*
* <code>string database_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for databaseId.
*/
public com.google.protobuf.ByteString getDatabaseIdBytes() {
java.lang.Object ref = databaseId_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
databaseId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The ID to use for the database, which will become the final
* component of the database's resource name.
* </pre>
*
* <code>string database_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The databaseId to set.
* @return This builder for chaining.
*/
public Builder setDatabaseId(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
databaseId_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The ID to use for the database, which will become the final
* component of the database's resource name.
* </pre>
*
* <code>string database_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return This builder for chaining.
*/
public Builder clearDatabaseId() {
databaseId_ = getDefaultInstance().getDatabaseId();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The ID to use for the database, which will become the final
* component of the database's resource name.
* </pre>
*
* <code>string database_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The bytes for databaseId to set.
* @return This builder for chaining.
*/
public Builder setDatabaseIdBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
databaseId_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.bigquery.biglake.v1.CreateDatabaseRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.bigquery.biglake.v1.CreateDatabaseRequest)
  // Singleton default (all-fields-empty) instance, created eagerly at class load.
  private static final com.google.cloud.bigquery.biglake.v1.CreateDatabaseRequest DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.bigquery.biglake.v1.CreateDatabaseRequest();
  }

  public static com.google.cloud.bigquery.biglake.v1.CreateDatabaseRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Wire-format parser. Delegates to Builder.mergeFrom and, on any failure,
  // attaches the partially-parsed message to the thrown
  // InvalidProtocolBufferException so callers can inspect partial progress.
  private static final com.google.protobuf.Parser<CreateDatabaseRequest> PARSER =
      new com.google.protobuf.AbstractParser<CreateDatabaseRequest>() {
        @java.lang.Override
        public CreateDatabaseRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O errors so the parser contract (IPBE only) holds.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  // Static accessor for the shared wire-format parser.
  public static com.google.protobuf.Parser<CreateDatabaseRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<CreateDatabaseRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.bigquery.biglake.v1.CreateDatabaseRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
// ---- NOTE(review): corpus-extraction artifact. The original row read:
// ---- "googleads/google-ads-java | 37,770 |
// ---- google-ads-stubs-v21/src/main/java/com/google/ads/googleads/v21/resources/CampaignCriterionOrBuilder.java"
// ---- marking the boundary where a second generated file begins below.
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/ads/googleads/v21/resources/campaign_criterion.proto
// Protobuf Java Version: 3.25.7
package com.google.ads.googleads.v21.resources;
public interface CampaignCriterionOrBuilder extends
// @@protoc_insertion_point(interface_extends:google.ads.googleads.v21.resources.CampaignCriterion)
com.google.protobuf.MessageOrBuilder {
/**
* <pre>
* Immutable. The resource name of the campaign criterion.
* Campaign criterion resource names have the form:
*
* `customers/{customer_id}/campaignCriteria/{campaign_id}~{criterion_id}`
* </pre>
*
* <code>string resource_name = 1 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code>
* @return The resourceName.
*/
java.lang.String getResourceName();
/**
* <pre>
* Immutable. The resource name of the campaign criterion.
* Campaign criterion resource names have the form:
*
* `customers/{customer_id}/campaignCriteria/{campaign_id}~{criterion_id}`
* </pre>
*
* <code>string resource_name = 1 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code>
* @return The bytes for resourceName.
*/
com.google.protobuf.ByteString
getResourceNameBytes();
/**
* <pre>
* Immutable. The campaign to which the criterion belongs.
* </pre>
*
* <code>optional string campaign = 37 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code>
* @return Whether the campaign field is set.
*/
boolean hasCampaign();
/**
* <pre>
* Immutable. The campaign to which the criterion belongs.
* </pre>
*
* <code>optional string campaign = 37 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code>
* @return The campaign.
*/
java.lang.String getCampaign();
/**
* <pre>
* Immutable. The campaign to which the criterion belongs.
* </pre>
*
* <code>optional string campaign = 37 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code>
* @return The bytes for campaign.
*/
com.google.protobuf.ByteString
getCampaignBytes();
/**
* <pre>
* Output only. The ID of the criterion.
*
* This field is ignored during mutate.
* </pre>
*
* <code>optional int64 criterion_id = 38 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
* @return Whether the criterionId field is set.
*/
boolean hasCriterionId();
/**
* <pre>
* Output only. The ID of the criterion.
*
* This field is ignored during mutate.
* </pre>
*
* <code>optional int64 criterion_id = 38 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
* @return The criterionId.
*/
long getCriterionId();
/**
* <pre>
* Output only. The display name of the criterion.
*
* This field is ignored for mutates.
* </pre>
*
* <code>string display_name = 43 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
* @return The displayName.
*/
java.lang.String getDisplayName();
/**
* <pre>
* Output only. The display name of the criterion.
*
* This field is ignored for mutates.
* </pre>
*
* <code>string display_name = 43 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
* @return The bytes for displayName.
*/
com.google.protobuf.ByteString
getDisplayNameBytes();
/**
* <pre>
* The modifier for the bids when the criterion matches. The modifier must be
* in the range: 0.1 - 10.0. Most targetable criteria types support modifiers.
* Use 0 to opt out of a Device type.
* </pre>
*
* <code>optional float bid_modifier = 39;</code>
* @return Whether the bidModifier field is set.
*/
boolean hasBidModifier();
/**
* <pre>
* The modifier for the bids when the criterion matches. The modifier must be
* in the range: 0.1 - 10.0. Most targetable criteria types support modifiers.
* Use 0 to opt out of a Device type.
* </pre>
*
* <code>optional float bid_modifier = 39;</code>
* @return The bidModifier.
*/
float getBidModifier();
/**
* <pre>
* Immutable. Whether to target (`false`) or exclude (`true`) the criterion.
* </pre>
*
* <code>optional bool negative = 40 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return Whether the negative field is set.
*/
boolean hasNegative();
/**
* <pre>
* Immutable. Whether to target (`false`) or exclude (`true`) the criterion.
* </pre>
*
* <code>optional bool negative = 40 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return The negative.
*/
boolean getNegative();
/**
* <pre>
* Output only. The type of the criterion.
* </pre>
*
* <code>.google.ads.googleads.v21.enums.CriterionTypeEnum.CriterionType type = 6 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
* @return The enum numeric value on the wire for type.
*/
int getTypeValue();
/**
* <pre>
* Output only. The type of the criterion.
* </pre>
*
* <code>.google.ads.googleads.v21.enums.CriterionTypeEnum.CriterionType type = 6 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
* @return The type.
*/
com.google.ads.googleads.v21.enums.CriterionTypeEnum.CriterionType getType();
/**
* <pre>
* The status of the criterion.
* </pre>
*
* <code>.google.ads.googleads.v21.enums.CampaignCriterionStatusEnum.CampaignCriterionStatus status = 35;</code>
* @return The enum numeric value on the wire for status.
*/
int getStatusValue();
/**
* <pre>
* The status of the criterion.
* </pre>
*
* <code>.google.ads.googleads.v21.enums.CampaignCriterionStatusEnum.CampaignCriterionStatus status = 35;</code>
* @return The status.
*/
com.google.ads.googleads.v21.enums.CampaignCriterionStatusEnum.CampaignCriterionStatus getStatus();
/**
* <pre>
* Immutable. Keyword.
* </pre>
*
* <code>.google.ads.googleads.v21.common.KeywordInfo keyword = 8 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return Whether the keyword field is set.
*/
boolean hasKeyword();
/**
* <pre>
* Immutable. Keyword.
* </pre>
*
* <code>.google.ads.googleads.v21.common.KeywordInfo keyword = 8 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return The keyword.
*/
com.google.ads.googleads.v21.common.KeywordInfo getKeyword();
/**
* <pre>
* Immutable. Keyword.
* </pre>
*
* <code>.google.ads.googleads.v21.common.KeywordInfo keyword = 8 [(.google.api.field_behavior) = IMMUTABLE];</code>
*/
com.google.ads.googleads.v21.common.KeywordInfoOrBuilder getKeywordOrBuilder();
/**
* <pre>
* Immutable. Placement.
* </pre>
*
* <code>.google.ads.googleads.v21.common.PlacementInfo placement = 9 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return Whether the placement field is set.
*/
boolean hasPlacement();
/**
* <pre>
* Immutable. Placement.
* </pre>
*
* <code>.google.ads.googleads.v21.common.PlacementInfo placement = 9 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return The placement.
*/
com.google.ads.googleads.v21.common.PlacementInfo getPlacement();
/**
* <pre>
* Immutable. Placement.
* </pre>
*
* <code>.google.ads.googleads.v21.common.PlacementInfo placement = 9 [(.google.api.field_behavior) = IMMUTABLE];</code>
*/
com.google.ads.googleads.v21.common.PlacementInfoOrBuilder getPlacementOrBuilder();
/**
* <pre>
* Immutable. Mobile app category.
* </pre>
*
* <code>.google.ads.googleads.v21.common.MobileAppCategoryInfo mobile_app_category = 10 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return Whether the mobileAppCategory field is set.
*/
boolean hasMobileAppCategory();
/**
* <pre>
* Immutable. Mobile app category.
* </pre>
*
* <code>.google.ads.googleads.v21.common.MobileAppCategoryInfo mobile_app_category = 10 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return The mobileAppCategory.
*/
com.google.ads.googleads.v21.common.MobileAppCategoryInfo getMobileAppCategory();
/**
* <pre>
* Immutable. Mobile app category.
* </pre>
*
* <code>.google.ads.googleads.v21.common.MobileAppCategoryInfo mobile_app_category = 10 [(.google.api.field_behavior) = IMMUTABLE];</code>
*/
com.google.ads.googleads.v21.common.MobileAppCategoryInfoOrBuilder getMobileAppCategoryOrBuilder();
/**
* <pre>
* Immutable. Mobile application.
* </pre>
*
* <code>.google.ads.googleads.v21.common.MobileApplicationInfo mobile_application = 11 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return Whether the mobileApplication field is set.
*/
boolean hasMobileApplication();
/**
* <pre>
* Immutable. Mobile application.
* </pre>
*
* <code>.google.ads.googleads.v21.common.MobileApplicationInfo mobile_application = 11 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return The mobileApplication.
*/
com.google.ads.googleads.v21.common.MobileApplicationInfo getMobileApplication();
/**
* <pre>
* Immutable. Mobile application.
* </pre>
*
* <code>.google.ads.googleads.v21.common.MobileApplicationInfo mobile_application = 11 [(.google.api.field_behavior) = IMMUTABLE];</code>
*/
com.google.ads.googleads.v21.common.MobileApplicationInfoOrBuilder getMobileApplicationOrBuilder();
/**
* <pre>
* Immutable. Location.
* </pre>
*
* <code>.google.ads.googleads.v21.common.LocationInfo location = 12 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return Whether the location field is set.
*/
boolean hasLocation();
/**
* <pre>
* Immutable. Location.
* </pre>
*
* <code>.google.ads.googleads.v21.common.LocationInfo location = 12 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return The location.
*/
com.google.ads.googleads.v21.common.LocationInfo getLocation();
/**
* <pre>
* Immutable. Location.
* </pre>
*
* <code>.google.ads.googleads.v21.common.LocationInfo location = 12 [(.google.api.field_behavior) = IMMUTABLE];</code>
*/
com.google.ads.googleads.v21.common.LocationInfoOrBuilder getLocationOrBuilder();
/**
* <pre>
* Immutable. Device.
* </pre>
*
* <code>.google.ads.googleads.v21.common.DeviceInfo device = 13 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return Whether the device field is set.
*/
boolean hasDevice();
/**
* <pre>
* Immutable. Device.
* </pre>
*
* <code>.google.ads.googleads.v21.common.DeviceInfo device = 13 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return The device.
*/
com.google.ads.googleads.v21.common.DeviceInfo getDevice();
/**
* <pre>
* Immutable. Device.
* </pre>
*
* <code>.google.ads.googleads.v21.common.DeviceInfo device = 13 [(.google.api.field_behavior) = IMMUTABLE];</code>
*/
com.google.ads.googleads.v21.common.DeviceInfoOrBuilder getDeviceOrBuilder();
/**
* <pre>
* Immutable. Ad Schedule.
* </pre>
*
* <code>.google.ads.googleads.v21.common.AdScheduleInfo ad_schedule = 15 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return Whether the adSchedule field is set.
*/
boolean hasAdSchedule();
/**
* <pre>
* Immutable. Ad Schedule.
* </pre>
*
* <code>.google.ads.googleads.v21.common.AdScheduleInfo ad_schedule = 15 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return The adSchedule.
*/
com.google.ads.googleads.v21.common.AdScheduleInfo getAdSchedule();
/**
* <pre>
* Immutable. Ad Schedule.
* </pre>
*
* <code>.google.ads.googleads.v21.common.AdScheduleInfo ad_schedule = 15 [(.google.api.field_behavior) = IMMUTABLE];</code>
*/
com.google.ads.googleads.v21.common.AdScheduleInfoOrBuilder getAdScheduleOrBuilder();
/**
* <pre>
* Immutable. Age range.
* </pre>
*
* <code>.google.ads.googleads.v21.common.AgeRangeInfo age_range = 16 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return Whether the ageRange field is set.
*/
boolean hasAgeRange();
/**
* <pre>
* Immutable. Age range.
* </pre>
*
* <code>.google.ads.googleads.v21.common.AgeRangeInfo age_range = 16 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return The ageRange.
*/
com.google.ads.googleads.v21.common.AgeRangeInfo getAgeRange();
/**
* <pre>
* Immutable. Age range.
* </pre>
*
* <code>.google.ads.googleads.v21.common.AgeRangeInfo age_range = 16 [(.google.api.field_behavior) = IMMUTABLE];</code>
*/
com.google.ads.googleads.v21.common.AgeRangeInfoOrBuilder getAgeRangeOrBuilder();
/**
* <pre>
* Immutable. Gender.
* </pre>
*
* <code>.google.ads.googleads.v21.common.GenderInfo gender = 17 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return Whether the gender field is set.
*/
boolean hasGender();
/**
* <pre>
* Immutable. Gender.
* </pre>
*
* <code>.google.ads.googleads.v21.common.GenderInfo gender = 17 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return The gender.
*/
com.google.ads.googleads.v21.common.GenderInfo getGender();
/**
* <pre>
* Immutable. Gender.
* </pre>
*
* <code>.google.ads.googleads.v21.common.GenderInfo gender = 17 [(.google.api.field_behavior) = IMMUTABLE];</code>
*/
com.google.ads.googleads.v21.common.GenderInfoOrBuilder getGenderOrBuilder();
/**
* <pre>
* Immutable. Income range.
* </pre>
*
* <code>.google.ads.googleads.v21.common.IncomeRangeInfo income_range = 18 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return Whether the incomeRange field is set.
*/
boolean hasIncomeRange();
/**
* <pre>
* Immutable. Income range.
* </pre>
*
* <code>.google.ads.googleads.v21.common.IncomeRangeInfo income_range = 18 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return The incomeRange.
*/
com.google.ads.googleads.v21.common.IncomeRangeInfo getIncomeRange();
/**
* <pre>
* Immutable. Income range.
* </pre>
*
* <code>.google.ads.googleads.v21.common.IncomeRangeInfo income_range = 18 [(.google.api.field_behavior) = IMMUTABLE];</code>
*/
com.google.ads.googleads.v21.common.IncomeRangeInfoOrBuilder getIncomeRangeOrBuilder();
/**
* <pre>
* Immutable. Parental status.
* </pre>
*
* <code>.google.ads.googleads.v21.common.ParentalStatusInfo parental_status = 19 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return Whether the parentalStatus field is set.
*/
boolean hasParentalStatus();
/**
* <pre>
* Immutable. Parental status.
* </pre>
*
* <code>.google.ads.googleads.v21.common.ParentalStatusInfo parental_status = 19 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return The parentalStatus.
*/
com.google.ads.googleads.v21.common.ParentalStatusInfo getParentalStatus();
/**
* <pre>
* Immutable. Parental status.
* </pre>
*
* <code>.google.ads.googleads.v21.common.ParentalStatusInfo parental_status = 19 [(.google.api.field_behavior) = IMMUTABLE];</code>
*/
com.google.ads.googleads.v21.common.ParentalStatusInfoOrBuilder getParentalStatusOrBuilder();
/**
* <pre>
* Immutable. User List.
* </pre>
*
* <code>.google.ads.googleads.v21.common.UserListInfo user_list = 22 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return Whether the userList field is set.
*/
boolean hasUserList();
/**
* <pre>
* Immutable. User List.
* </pre>
*
* <code>.google.ads.googleads.v21.common.UserListInfo user_list = 22 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return The userList.
*/
com.google.ads.googleads.v21.common.UserListInfo getUserList();
/**
* <pre>
* Immutable. User List.
* </pre>
*
* <code>.google.ads.googleads.v21.common.UserListInfo user_list = 22 [(.google.api.field_behavior) = IMMUTABLE];</code>
*/
com.google.ads.googleads.v21.common.UserListInfoOrBuilder getUserListOrBuilder();
/**
* <pre>
* Immutable. YouTube Video.
* </pre>
*
* <code>.google.ads.googleads.v21.common.YouTubeVideoInfo youtube_video = 20 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return Whether the youtubeVideo field is set.
*/
boolean hasYoutubeVideo();
/**
* <pre>
* Immutable. YouTube Video.
* </pre>
*
* <code>.google.ads.googleads.v21.common.YouTubeVideoInfo youtube_video = 20 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return The youtubeVideo.
*/
com.google.ads.googleads.v21.common.YouTubeVideoInfo getYoutubeVideo();
/**
* <pre>
* Immutable. YouTube Video.
* </pre>
*
* <code>.google.ads.googleads.v21.common.YouTubeVideoInfo youtube_video = 20 [(.google.api.field_behavior) = IMMUTABLE];</code>
*/
com.google.ads.googleads.v21.common.YouTubeVideoInfoOrBuilder getYoutubeVideoOrBuilder();
/**
* <pre>
* Immutable. YouTube Channel.
* </pre>
*
* <code>.google.ads.googleads.v21.common.YouTubeChannelInfo youtube_channel = 21 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return Whether the youtubeChannel field is set.
*/
boolean hasYoutubeChannel();
/**
* <pre>
* Immutable. YouTube Channel.
* </pre>
*
* <code>.google.ads.googleads.v21.common.YouTubeChannelInfo youtube_channel = 21 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return The youtubeChannel.
*/
com.google.ads.googleads.v21.common.YouTubeChannelInfo getYoutubeChannel();
/**
* <pre>
* Immutable. YouTube Channel.
* </pre>
*
* <code>.google.ads.googleads.v21.common.YouTubeChannelInfo youtube_channel = 21 [(.google.api.field_behavior) = IMMUTABLE];</code>
*/
com.google.ads.googleads.v21.common.YouTubeChannelInfoOrBuilder getYoutubeChannelOrBuilder();
/**
* <pre>
* Immutable. Proximity.
* </pre>
*
* <code>.google.ads.googleads.v21.common.ProximityInfo proximity = 23 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return Whether the proximity field is set.
*/
boolean hasProximity();
/**
* <pre>
* Immutable. Proximity.
* </pre>
*
* <code>.google.ads.googleads.v21.common.ProximityInfo proximity = 23 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return The proximity.
*/
com.google.ads.googleads.v21.common.ProximityInfo getProximity();
/**
* <pre>
* Immutable. Proximity.
* </pre>
*
* <code>.google.ads.googleads.v21.common.ProximityInfo proximity = 23 [(.google.api.field_behavior) = IMMUTABLE];</code>
*/
com.google.ads.googleads.v21.common.ProximityInfoOrBuilder getProximityOrBuilder();
/**
* <pre>
* Immutable. Topic.
* </pre>
*
* <code>.google.ads.googleads.v21.common.TopicInfo topic = 24 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return Whether the topic field is set.
*/
boolean hasTopic();
/**
* <pre>
* Immutable. Topic.
* </pre>
*
* <code>.google.ads.googleads.v21.common.TopicInfo topic = 24 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return The topic.
*/
com.google.ads.googleads.v21.common.TopicInfo getTopic();
/**
* <pre>
* Immutable. Topic.
* </pre>
*
* <code>.google.ads.googleads.v21.common.TopicInfo topic = 24 [(.google.api.field_behavior) = IMMUTABLE];</code>
*/
com.google.ads.googleads.v21.common.TopicInfoOrBuilder getTopicOrBuilder();
/**
* <pre>
* Immutable. Listing scope.
* </pre>
*
* <code>.google.ads.googleads.v21.common.ListingScopeInfo listing_scope = 25 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return Whether the listingScope field is set.
*/
boolean hasListingScope();
/**
* <pre>
* Immutable. Listing scope.
* </pre>
*
* <code>.google.ads.googleads.v21.common.ListingScopeInfo listing_scope = 25 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return The listingScope.
*/
com.google.ads.googleads.v21.common.ListingScopeInfo getListingScope();
/**
* <pre>
* Immutable. Listing scope.
* </pre>
*
* <code>.google.ads.googleads.v21.common.ListingScopeInfo listing_scope = 25 [(.google.api.field_behavior) = IMMUTABLE];</code>
*/
com.google.ads.googleads.v21.common.ListingScopeInfoOrBuilder getListingScopeOrBuilder();
/**
* <pre>
* Immutable. Language.
* </pre>
*
* <code>.google.ads.googleads.v21.common.LanguageInfo language = 26 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return Whether the language field is set.
*/
boolean hasLanguage();
/**
* <pre>
* Immutable. Language.
* </pre>
*
* <code>.google.ads.googleads.v21.common.LanguageInfo language = 26 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return The language.
*/
com.google.ads.googleads.v21.common.LanguageInfo getLanguage();
/**
* <pre>
* Immutable. Language.
* </pre>
*
* <code>.google.ads.googleads.v21.common.LanguageInfo language = 26 [(.google.api.field_behavior) = IMMUTABLE];</code>
*/
com.google.ads.googleads.v21.common.LanguageInfoOrBuilder getLanguageOrBuilder();
/**
* <pre>
* Immutable. IpBlock.
* </pre>
*
* <code>.google.ads.googleads.v21.common.IpBlockInfo ip_block = 27 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return Whether the ipBlock field is set.
*/
boolean hasIpBlock();
/**
* <pre>
* Immutable. IpBlock.
* </pre>
*
* <code>.google.ads.googleads.v21.common.IpBlockInfo ip_block = 27 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return The ipBlock.
*/
com.google.ads.googleads.v21.common.IpBlockInfo getIpBlock();
/**
* <pre>
* Immutable. IpBlock.
* </pre>
*
* <code>.google.ads.googleads.v21.common.IpBlockInfo ip_block = 27 [(.google.api.field_behavior) = IMMUTABLE];</code>
*/
com.google.ads.googleads.v21.common.IpBlockInfoOrBuilder getIpBlockOrBuilder();
/**
* <pre>
* Immutable. ContentLabel.
* </pre>
*
* <code>.google.ads.googleads.v21.common.ContentLabelInfo content_label = 28 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return Whether the contentLabel field is set.
*/
boolean hasContentLabel();
/**
* <pre>
* Immutable. ContentLabel.
* </pre>
*
* <code>.google.ads.googleads.v21.common.ContentLabelInfo content_label = 28 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return The contentLabel.
*/
com.google.ads.googleads.v21.common.ContentLabelInfo getContentLabel();
/**
* <pre>
* Immutable. ContentLabel.
* </pre>
*
* <code>.google.ads.googleads.v21.common.ContentLabelInfo content_label = 28 [(.google.api.field_behavior) = IMMUTABLE];</code>
*/
com.google.ads.googleads.v21.common.ContentLabelInfoOrBuilder getContentLabelOrBuilder();
/**
* <pre>
* Immutable. Carrier.
* </pre>
*
* <code>.google.ads.googleads.v21.common.CarrierInfo carrier = 29 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return Whether the carrier field is set.
*/
boolean hasCarrier();
/**
* <pre>
* Immutable. Carrier.
* </pre>
*
* <code>.google.ads.googleads.v21.common.CarrierInfo carrier = 29 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return The carrier.
*/
com.google.ads.googleads.v21.common.CarrierInfo getCarrier();
/**
* <pre>
* Immutable. Carrier.
* </pre>
*
* <code>.google.ads.googleads.v21.common.CarrierInfo carrier = 29 [(.google.api.field_behavior) = IMMUTABLE];</code>
*/
com.google.ads.googleads.v21.common.CarrierInfoOrBuilder getCarrierOrBuilder();
/**
* <pre>
* Immutable. User Interest.
* </pre>
*
* <code>.google.ads.googleads.v21.common.UserInterestInfo user_interest = 30 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return Whether the userInterest field is set.
*/
boolean hasUserInterest();
/**
* <pre>
* Immutable. User Interest.
* </pre>
*
* <code>.google.ads.googleads.v21.common.UserInterestInfo user_interest = 30 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return The userInterest.
*/
com.google.ads.googleads.v21.common.UserInterestInfo getUserInterest();
/**
* <pre>
* Immutable. User Interest.
* </pre>
*
* <code>.google.ads.googleads.v21.common.UserInterestInfo user_interest = 30 [(.google.api.field_behavior) = IMMUTABLE];</code>
*/
com.google.ads.googleads.v21.common.UserInterestInfoOrBuilder getUserInterestOrBuilder();
/**
* <pre>
* Immutable. Webpage.
* </pre>
*
* <code>.google.ads.googleads.v21.common.WebpageInfo webpage = 31 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return Whether the webpage field is set.
*/
boolean hasWebpage();
/**
* <pre>
* Immutable. Webpage.
* </pre>
*
* <code>.google.ads.googleads.v21.common.WebpageInfo webpage = 31 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return The webpage.
*/
com.google.ads.googleads.v21.common.WebpageInfo getWebpage();
/**
* <pre>
* Immutable. Webpage.
* </pre>
*
* <code>.google.ads.googleads.v21.common.WebpageInfo webpage = 31 [(.google.api.field_behavior) = IMMUTABLE];</code>
*/
com.google.ads.googleads.v21.common.WebpageInfoOrBuilder getWebpageOrBuilder();
/**
* <pre>
* Immutable. Operating system version.
* </pre>
*
* <code>.google.ads.googleads.v21.common.OperatingSystemVersionInfo operating_system_version = 32 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return Whether the operatingSystemVersion field is set.
*/
boolean hasOperatingSystemVersion();
/**
* <pre>
* Immutable. Operating system version.
* </pre>
*
* <code>.google.ads.googleads.v21.common.OperatingSystemVersionInfo operating_system_version = 32 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return The operatingSystemVersion.
*/
com.google.ads.googleads.v21.common.OperatingSystemVersionInfo getOperatingSystemVersion();
/**
* <pre>
* Immutable. Operating system version.
* </pre>
*
* <code>.google.ads.googleads.v21.common.OperatingSystemVersionInfo operating_system_version = 32 [(.google.api.field_behavior) = IMMUTABLE];</code>
*/
com.google.ads.googleads.v21.common.OperatingSystemVersionInfoOrBuilder getOperatingSystemVersionOrBuilder();
/**
* <pre>
* Immutable. Mobile Device.
* </pre>
*
* <code>.google.ads.googleads.v21.common.MobileDeviceInfo mobile_device = 33 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return Whether the mobileDevice field is set.
*/
boolean hasMobileDevice();
/**
* <pre>
* Immutable. Mobile Device.
* </pre>
*
* <code>.google.ads.googleads.v21.common.MobileDeviceInfo mobile_device = 33 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return The mobileDevice.
*/
com.google.ads.googleads.v21.common.MobileDeviceInfo getMobileDevice();
/**
* <pre>
* Immutable. Mobile Device.
* </pre>
*
* <code>.google.ads.googleads.v21.common.MobileDeviceInfo mobile_device = 33 [(.google.api.field_behavior) = IMMUTABLE];</code>
*/
com.google.ads.googleads.v21.common.MobileDeviceInfoOrBuilder getMobileDeviceOrBuilder();
/**
* <pre>
* Immutable. Location Group
* </pre>
*
* <code>.google.ads.googleads.v21.common.LocationGroupInfo location_group = 34 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return Whether the locationGroup field is set.
*/
boolean hasLocationGroup();
/**
* <pre>
* Immutable. Location Group
* </pre>
*
* <code>.google.ads.googleads.v21.common.LocationGroupInfo location_group = 34 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return The locationGroup.
*/
com.google.ads.googleads.v21.common.LocationGroupInfo getLocationGroup();
/**
* <pre>
* Immutable. Location Group
* </pre>
*
* <code>.google.ads.googleads.v21.common.LocationGroupInfo location_group = 34 [(.google.api.field_behavior) = IMMUTABLE];</code>
*/
com.google.ads.googleads.v21.common.LocationGroupInfoOrBuilder getLocationGroupOrBuilder();
/**
* <pre>
* Immutable. Custom Affinity.
* </pre>
*
* <code>.google.ads.googleads.v21.common.CustomAffinityInfo custom_affinity = 36 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return Whether the customAffinity field is set.
*/
boolean hasCustomAffinity();
/**
* <pre>
* Immutable. Custom Affinity.
* </pre>
*
* <code>.google.ads.googleads.v21.common.CustomAffinityInfo custom_affinity = 36 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return The customAffinity.
*/
com.google.ads.googleads.v21.common.CustomAffinityInfo getCustomAffinity();
/**
* <pre>
* Immutable. Custom Affinity.
* </pre>
*
* <code>.google.ads.googleads.v21.common.CustomAffinityInfo custom_affinity = 36 [(.google.api.field_behavior) = IMMUTABLE];</code>
*/
com.google.ads.googleads.v21.common.CustomAffinityInfoOrBuilder getCustomAffinityOrBuilder();
/**
* <pre>
* Immutable. Custom Audience
* </pre>
*
* <code>.google.ads.googleads.v21.common.CustomAudienceInfo custom_audience = 41 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return Whether the customAudience field is set.
*/
boolean hasCustomAudience();
/**
* <pre>
* Immutable. Custom Audience
* </pre>
*
* <code>.google.ads.googleads.v21.common.CustomAudienceInfo custom_audience = 41 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return The customAudience.
*/
com.google.ads.googleads.v21.common.CustomAudienceInfo getCustomAudience();
/**
* <pre>
* Immutable. Custom Audience
* </pre>
*
* <code>.google.ads.googleads.v21.common.CustomAudienceInfo custom_audience = 41 [(.google.api.field_behavior) = IMMUTABLE];</code>
*/
com.google.ads.googleads.v21.common.CustomAudienceInfoOrBuilder getCustomAudienceOrBuilder();
/**
* <pre>
* Immutable. Combined Audience.
* </pre>
*
* <code>.google.ads.googleads.v21.common.CombinedAudienceInfo combined_audience = 42 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return Whether the combinedAudience field is set.
*/
boolean hasCombinedAudience();
/**
* <pre>
* Immutable. Combined Audience.
* </pre>
*
* <code>.google.ads.googleads.v21.common.CombinedAudienceInfo combined_audience = 42 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return The combinedAudience.
*/
com.google.ads.googleads.v21.common.CombinedAudienceInfo getCombinedAudience();
/**
* <pre>
* Immutable. Combined Audience.
* </pre>
*
* <code>.google.ads.googleads.v21.common.CombinedAudienceInfo combined_audience = 42 [(.google.api.field_behavior) = IMMUTABLE];</code>
*/
com.google.ads.googleads.v21.common.CombinedAudienceInfoOrBuilder getCombinedAudienceOrBuilder();
/**
* <pre>
* Immutable. Smart Campaign Keyword Theme.
* </pre>
*
* <code>.google.ads.googleads.v21.common.KeywordThemeInfo keyword_theme = 45 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return Whether the keywordTheme field is set.
*/
boolean hasKeywordTheme();
/**
* <pre>
* Immutable. Smart Campaign Keyword Theme.
* </pre>
*
* <code>.google.ads.googleads.v21.common.KeywordThemeInfo keyword_theme = 45 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return The keywordTheme.
*/
com.google.ads.googleads.v21.common.KeywordThemeInfo getKeywordTheme();
/**
* <pre>
* Immutable. Smart Campaign Keyword Theme.
* </pre>
*
* <code>.google.ads.googleads.v21.common.KeywordThemeInfo keyword_theme = 45 [(.google.api.field_behavior) = IMMUTABLE];</code>
*/
com.google.ads.googleads.v21.common.KeywordThemeInfoOrBuilder getKeywordThemeOrBuilder();
/**
* <pre>
* Immutable. GLS service campaign criterion.
* </pre>
*
* <code>.google.ads.googleads.v21.common.LocalServiceIdInfo local_service_id = 46 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return Whether the localServiceId field is set.
*/
boolean hasLocalServiceId();
/**
* <pre>
* Immutable. GLS service campaign criterion.
* </pre>
*
* <code>.google.ads.googleads.v21.common.LocalServiceIdInfo local_service_id = 46 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return The localServiceId.
*/
com.google.ads.googleads.v21.common.LocalServiceIdInfo getLocalServiceId();
/**
* <pre>
* Immutable. GLS service campaign criterion.
* </pre>
*
* <code>.google.ads.googleads.v21.common.LocalServiceIdInfo local_service_id = 46 [(.google.api.field_behavior) = IMMUTABLE];</code>
*/
com.google.ads.googleads.v21.common.LocalServiceIdInfoOrBuilder getLocalServiceIdOrBuilder();
/**
* <pre>
* Immutable. Brand list campaign criterion.
* </pre>
*
* <code>.google.ads.googleads.v21.common.BrandListInfo brand_list = 47 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return Whether the brandList field is set.
*/
boolean hasBrandList();
/**
* <pre>
* Immutable. Brand list campaign criterion.
* </pre>
*
* <code>.google.ads.googleads.v21.common.BrandListInfo brand_list = 47 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return The brandList.
*/
com.google.ads.googleads.v21.common.BrandListInfo getBrandList();
/**
* <pre>
* Immutable. Brand list campaign criterion.
* </pre>
*
* <code>.google.ads.googleads.v21.common.BrandListInfo brand_list = 47 [(.google.api.field_behavior) = IMMUTABLE];</code>
*/
com.google.ads.googleads.v21.common.BrandListInfoOrBuilder getBrandListOrBuilder();
/**
* <pre>
* Immutable. Life event campaign criterion.
* </pre>
*
* <code>.google.ads.googleads.v21.common.LifeEventInfo life_event = 48 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return Whether the lifeEvent field is set.
*/
boolean hasLifeEvent();
/**
* <pre>
* Immutable. Life event campaign criterion.
* </pre>
*
* <code>.google.ads.googleads.v21.common.LifeEventInfo life_event = 48 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return The lifeEvent.
*/
com.google.ads.googleads.v21.common.LifeEventInfo getLifeEvent();
/**
* <pre>
* Immutable. Life event campaign criterion.
* </pre>
*
* <code>.google.ads.googleads.v21.common.LifeEventInfo life_event = 48 [(.google.api.field_behavior) = IMMUTABLE];</code>
*/
com.google.ads.googleads.v21.common.LifeEventInfoOrBuilder getLifeEventOrBuilder();
/**
* <pre>
* Immutable. Webpage list.
* This criterion is not publicly available.
* </pre>
*
* <code>.google.ads.googleads.v21.common.WebpageListInfo webpage_list = 49 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return Whether the webpageList field is set.
*/
boolean hasWebpageList();
/**
* <pre>
* Immutable. Webpage list.
* This criterion is not publicly available.
* </pre>
*
* <code>.google.ads.googleads.v21.common.WebpageListInfo webpage_list = 49 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return The webpageList.
*/
com.google.ads.googleads.v21.common.WebpageListInfo getWebpageList();
/**
* <pre>
* Immutable. Webpage list.
* This criterion is not publicly available.
* </pre>
*
* <code>.google.ads.googleads.v21.common.WebpageListInfo webpage_list = 49 [(.google.api.field_behavior) = IMMUTABLE];</code>
*/
com.google.ads.googleads.v21.common.WebpageListInfoOrBuilder getWebpageListOrBuilder();
/**
* <pre>
* Immutable. Video lineup criterion.
* </pre>
*
* <code>.google.ads.googleads.v21.common.VideoLineupInfo video_lineup = 50 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return Whether the videoLineup field is set.
*/
boolean hasVideoLineup();
/**
* <pre>
* Immutable. Video lineup criterion.
* </pre>
*
* <code>.google.ads.googleads.v21.common.VideoLineupInfo video_lineup = 50 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return The videoLineup.
*/
com.google.ads.googleads.v21.common.VideoLineupInfo getVideoLineup();
/**
* <pre>
* Immutable. Video lineup criterion.
* </pre>
*
* <code>.google.ads.googleads.v21.common.VideoLineupInfo video_lineup = 50 [(.google.api.field_behavior) = IMMUTABLE];</code>
*/
com.google.ads.googleads.v21.common.VideoLineupInfoOrBuilder getVideoLineupOrBuilder();
/**
* <pre>
* Immutable. Extended demographic criterion.
* </pre>
*
* <code>.google.ads.googleads.v21.common.ExtendedDemographicInfo extended_demographic = 52 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return Whether the extendedDemographic field is set.
*/
boolean hasExtendedDemographic();
/**
* <pre>
* Immutable. Extended demographic criterion.
* </pre>
*
* <code>.google.ads.googleads.v21.common.ExtendedDemographicInfo extended_demographic = 52 [(.google.api.field_behavior) = IMMUTABLE];</code>
* @return The extendedDemographic.
*/
com.google.ads.googleads.v21.common.ExtendedDemographicInfo getExtendedDemographic();
/**
* <pre>
* Immutable. Extended demographic criterion.
* </pre>
*
* <code>.google.ads.googleads.v21.common.ExtendedDemographicInfo extended_demographic = 52 [(.google.api.field_behavior) = IMMUTABLE];</code>
*/
com.google.ads.googleads.v21.common.ExtendedDemographicInfoOrBuilder getExtendedDemographicOrBuilder();
com.google.ads.googleads.v21.resources.CampaignCriterion.CriterionCase getCriterionCase();
}
|
hibernate/hibernate-orm | 35,130 | hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/DerbyDialect.java | /*
* SPDX-License-Identifier: Apache-2.0
* Copyright Red Hat Inc. and Hibernate Authors
*/
package org.hibernate.community.dialect;
import jakarta.persistence.TemporalType;
import jakarta.persistence.Timeout;
import org.hibernate.LockOptions;
import org.hibernate.boot.model.FunctionContributions;
import org.hibernate.boot.model.TypeContributions;
import org.hibernate.community.dialect.function.DerbyLpadEmulation;
import org.hibernate.community.dialect.function.DerbyRpadEmulation;
import org.hibernate.community.dialect.pagination.DerbyLimitHandler;
import org.hibernate.community.dialect.sequence.DerbySequenceSupport;
import org.hibernate.community.dialect.sequence.SequenceInformationExtractorDerbyDatabaseImpl;
import org.hibernate.community.dialect.temptable.DerbyLocalTemporaryTableStrategy;
import org.hibernate.dialect.DB2Dialect;
import org.hibernate.dialect.DatabaseVersion;
import org.hibernate.dialect.Dialect;
import org.hibernate.dialect.DmlTargetColumnQualifierSupport;
import org.hibernate.dialect.NationalizationSupport;
import org.hibernate.dialect.RowLockStrategy;
import org.hibernate.dialect.function.CaseLeastGreatestEmulation;
import org.hibernate.dialect.function.CastingConcatFunction;
import org.hibernate.dialect.function.ChrLiteralEmulation;
import org.hibernate.dialect.function.CommonFunctionFactory;
import org.hibernate.dialect.function.CountFunction;
import org.hibernate.dialect.function.InsertSubstringOverlayEmulation;
import org.hibernate.dialect.identity.DB2IdentityColumnSupport;
import org.hibernate.dialect.identity.IdentityColumnSupport;
import org.hibernate.dialect.lock.internal.LockingSupportSimple;
import org.hibernate.dialect.lock.spi.LockingSupport;
import org.hibernate.dialect.pagination.LimitHandler;
import org.hibernate.dialect.sequence.SequenceSupport;
import org.hibernate.dialect.temptable.TemporaryTableKind;
import org.hibernate.dialect.temptable.TemporaryTableStrategy;
import org.hibernate.dialect.unique.CreateTableUniqueDelegate;
import org.hibernate.dialect.unique.UniqueDelegate;
import org.hibernate.engine.jdbc.Size;
import org.hibernate.engine.jdbc.dialect.spi.DialectResolutionInfo;
import org.hibernate.engine.jdbc.env.spi.IdentifierHelper;
import org.hibernate.engine.jdbc.env.spi.IdentifierHelperBuilder;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.exception.ConstraintViolationException;
import org.hibernate.exception.LockTimeoutException;
import org.hibernate.exception.spi.SQLExceptionConversionDelegate;
import org.hibernate.exception.spi.TemplatedViolatedConstraintNameExtractor;
import org.hibernate.exception.spi.ViolatedConstraintNameExtractor;
import org.hibernate.internal.util.JdbcExceptionHelper;
import org.hibernate.metamodel.mapping.EntityMappingType;
import org.hibernate.metamodel.spi.RuntimeModelCreationContext;
import org.hibernate.query.common.TemporalUnit;
import org.hibernate.query.sqm.CastType;
import org.hibernate.query.sqm.IntervalType;
import org.hibernate.query.sqm.mutation.internal.temptable.LocalTemporaryTableInsertStrategy;
import org.hibernate.query.sqm.mutation.internal.temptable.LocalTemporaryTableMutationStrategy;
import org.hibernate.query.sqm.mutation.spi.BeforeUseAction;
import org.hibernate.query.sqm.mutation.spi.SqmMultiTableInsertStrategy;
import org.hibernate.query.sqm.mutation.spi.SqmMultiTableMutationStrategy;
import org.hibernate.service.ServiceRegistry;
import org.hibernate.sql.ast.SqlAstNodeRenderingMode;
import org.hibernate.sql.ast.SqlAstTranslator;
import org.hibernate.sql.ast.SqlAstTranslatorFactory;
import org.hibernate.sql.ast.internal.PessimisticLockKind;
import org.hibernate.sql.ast.spi.LockingClauseStrategy;
import org.hibernate.sql.ast.spi.SqlAppender;
import org.hibernate.sql.ast.spi.StandardSqlAstTranslatorFactory;
import org.hibernate.sql.ast.tree.Statement;
import org.hibernate.sql.exec.spi.JdbcOperation;
import org.hibernate.tool.schema.extract.spi.SequenceInformationExtractor;
import org.hibernate.type.BasicType;
import org.hibernate.type.BasicTypeRegistry;
import org.hibernate.type.JavaObjectType;
import org.hibernate.type.StandardBasicTypes;
import org.hibernate.type.descriptor.java.BigDecimalJavaType;
import org.hibernate.type.descriptor.jdbc.ObjectNullResolvingJdbcType;
import org.hibernate.type.descriptor.jdbc.TimestampJdbcType;
import org.hibernate.type.descriptor.jdbc.spi.JdbcTypeRegistry;
import org.hibernate.type.descriptor.sql.internal.CapacityDependentDdlType;
import org.hibernate.type.descriptor.sql.spi.DdlTypeRegistry;
import org.hibernate.type.spi.TypeConfiguration;
import java.sql.DatabaseMetaData;
import java.sql.SQLException;
import java.sql.Types;
import java.util.Locale;
import static org.hibernate.type.SqlTypes.BINARY;
import static org.hibernate.type.SqlTypes.BLOB;
import static org.hibernate.type.SqlTypes.CHAR;
import static org.hibernate.type.SqlTypes.CLOB;
import static org.hibernate.type.SqlTypes.DECIMAL;
import static org.hibernate.type.SqlTypes.LONG32NVARCHAR;
import static org.hibernate.type.SqlTypes.LONG32VARBINARY;
import static org.hibernate.type.SqlTypes.LONG32VARCHAR;
import static org.hibernate.type.SqlTypes.NCHAR;
import static org.hibernate.type.SqlTypes.NCLOB;
import static org.hibernate.type.SqlTypes.NUMERIC;
import static org.hibernate.type.SqlTypes.NVARCHAR;
import static org.hibernate.type.SqlTypes.TIME;
import static org.hibernate.type.SqlTypes.TIMESTAMP;
import static org.hibernate.type.SqlTypes.TIMESTAMP_WITH_TIMEZONE;
import static org.hibernate.type.SqlTypes.TIME_WITH_TIMEZONE;
import static org.hibernate.type.SqlTypes.TINYINT;
import static org.hibernate.type.SqlTypes.VARBINARY;
import static org.hibernate.type.SqlTypes.VARCHAR;
/**
* A {@linkplain Dialect SQL dialect} for Apache Derby 10.15.2 and above.
*
* @author Simon Johnston
* @author Gavin King
*
*/
public class DerbyDialect extends Dialect {
	// KNOWN LIMITATIONS:
	// * no support for nationalized data (nchar, nvarchar, nclob)
	// * limited set of fields for extract()
	//   (no 'day of xxxx', nor 'week of xxxx')
	// * no support for format()
	// * pad() can only pad with blanks
	// * can't cast String to Binary
	// * can't select a parameter unless wrapped
	//   in a cast or function call
	// Oldest Derby release this dialect supports; see getMinimumSupportedVersion()
	private final static DatabaseVersion MINIMUM_VERSION = DatabaseVersion.make( 10, 15, 2 );
	// NOTE(review): the boolean argument presumably enables variable/parameterized
	// limits in the generated fetch clause — confirm against DerbyLimitHandler
	private final LimitHandler limitHandler = new DerbyLimitHandler( true );
	// Unique constraints are emitted as part of the 'create table' statement
	// (per the delegate type) rather than via separate 'alter table' statements
	private final UniqueDelegate uniqueDelegate = new CreateTableUniqueDelegate(this);
public DerbyDialect() {
this( MINIMUM_VERSION);
}
	/**
	 * Constructs a {@code DerbyDialect} targeting the given Derby version.
	 *
	 * @param version the Derby database version to target
	 */
	public DerbyDialect(DatabaseVersion version) {
		super(version);
	}
	/**
	 * Constructs a {@code DerbyDialect} from metadata resolved from the
	 * JDBC driver/database at startup.
	 *
	 * @param info the resolved dialect metadata
	 */
	public DerbyDialect(DialectResolutionInfo info) {
		super(info);
	}
	/**
	 * The minimum Derby version supported by this dialect: {@code 10.15.2}.
	 */
	@Override
	protected DatabaseVersion getMinimumSupportedVersion() {
		return MINIMUM_VERSION;
	}
@Override
protected String columnType(int sqlTypeCode) {
return switch ( sqlTypeCode ) {
//no tinyint
case TINYINT -> "smallint";
// HHH-12827: map them both to the same type to avoid problems with schema update
// Note that 31 is the maximum precision Derby supports
case NUMERIC -> columnType( DECIMAL );
case VARBINARY -> "varchar($l) for bit data";
case NCHAR -> columnType( CHAR );
case NVARCHAR -> columnType( VARCHAR );
case BLOB -> "blob";
case CLOB, NCLOB -> "clob";
case TIME, TIME_WITH_TIMEZONE -> "time";
case TIMESTAMP, TIMESTAMP_WITH_TIMEZONE -> "timestamp";
default -> super.columnType( sqlTypeCode );
};
}
	/**
	 * Registers Derby-specific, capacity-dependent DDL column types on top of
	 * the defaults registered by the superclass. For each character/binary
	 * family, lengths up to 32,672 use the in-row varchar forms; larger
	 * lengths fall back to the corresponding "long" type (which may be a LOB,
	 * depending on {@code isLob(...)}).
	 */
	@Override
	protected void registerColumnTypes(TypeContributions typeContributions, ServiceRegistry serviceRegistry) {
		super.registerColumnTypes( typeContributions, serviceRegistry );
		final DdlTypeRegistry ddlTypeRegistry = typeContributions.getTypeConfiguration().getDdlTypeRegistry();
		// 32,672 matches getMaxVarcharLength(): the longest varchar Derby allows
		int varcharDdlTypeCapacity = 32_672;
		// varbinary: 'varchar($l) for bit data' up to the capacity, then the long form
		ddlTypeRegistry.addDescriptor(
				CapacityDependentDdlType.builder(
						VARBINARY,
						isLob( LONG32VARBINARY )
								? CapacityDependentDdlType.LobKind.BIGGEST_LOB
								: CapacityDependentDdlType.LobKind.NONE,
						columnType( LONG32VARBINARY ),
						columnType( VARBINARY ),
						this
				)
						.withTypeCapacity( varcharDdlTypeCapacity, columnType( VARBINARY ) )
						.build()
		);
		// varchar: plain varchar up to the capacity, then the long form
		ddlTypeRegistry.addDescriptor(
				CapacityDependentDdlType.builder(
						VARCHAR,
						isLob( LONG32VARCHAR )
								? CapacityDependentDdlType.LobKind.BIGGEST_LOB
								: CapacityDependentDdlType.LobKind.NONE,
						columnType( LONG32VARCHAR ),
						columnType( VARCHAR ),
						this
				)
						.withTypeCapacity( varcharDdlTypeCapacity, columnType( VARCHAR ) )
						.build()
		);
		// NOTE(review): this descriptor passes columnType( LONG32VARCHAR ) as the
		// long-form type while the lob-kind check uses LONG32NVARCHAR, and the
		// NCHAR descriptor below uses LONG32NVARCHAR for both. With implicit
		// nationalization both presumably resolve to the same Derby type, but
		// confirm the asymmetry is intentional.
		ddlTypeRegistry.addDescriptor(
				CapacityDependentDdlType.builder(
						NVARCHAR,
						isLob( LONG32NVARCHAR )
								? CapacityDependentDdlType.LobKind.BIGGEST_LOB
								: CapacityDependentDdlType.LobKind.NONE,
						columnType( LONG32VARCHAR ),
						columnType( NVARCHAR ),
						this
				)
						.withTypeCapacity( varcharDdlTypeCapacity, columnType( NVARCHAR ) )
						.build()
		);
		// binary: 'char($l) for bit data' up to 254, then 'varchar($l) for bit data'
		ddlTypeRegistry.addDescriptor(
				CapacityDependentDdlType.builder(
						BINARY,
						isLob( LONG32VARBINARY )
								? CapacityDependentDdlType.LobKind.BIGGEST_LOB
								: CapacityDependentDdlType.LobKind.NONE,
						columnType( LONG32VARBINARY ),
						columnType( VARBINARY ),
						this
				)
						.withTypeCapacity( 254, "char($l) for bit data" )
						.withTypeCapacity( varcharDdlTypeCapacity, columnType( VARBINARY ) )
						.build()
		);
		// 254 is the maximum size for the CHAR datatype on Derby; beyond that, varchar
		ddlTypeRegistry.addDescriptor(
				CapacityDependentDdlType.builder(
						CHAR,
						isLob( LONG32VARCHAR )
								? CapacityDependentDdlType.LobKind.BIGGEST_LOB
								: CapacityDependentDdlType.LobKind.NONE,
						columnType( LONG32VARCHAR ),
						columnType( CHAR ),
						this
				)
						.withTypeCapacity( 254, columnType( CHAR ) )
						.withTypeCapacity( getMaxVarcharLength(), columnType( VARCHAR ) )
						.build()
		);
		// nchar mirrors char (nationalization is implicit on Derby)
		ddlTypeRegistry.addDescriptor(
				CapacityDependentDdlType.builder(
						NCHAR,
						isLob( LONG32NVARCHAR )
								? CapacityDependentDdlType.LobKind.BIGGEST_LOB
								: CapacityDependentDdlType.LobKind.NONE,
						columnType( LONG32NVARCHAR ),
						columnType( NCHAR ),
						this
				)
						.withTypeCapacity( 254, columnType( NCHAR ) )
						.withTypeCapacity( getMaxVarcharLength(), columnType( NVARCHAR ) )
						.build()
		);
	}
	/**
	 * The longest legal {@code varchar} column on Derby: 32,672 characters.
	 */
	@Override
	public int getMaxVarcharLength() {
		return 32_672;
	}
	/**
	 * 32,700 — NOTE(review): presumably the largest character capacity Derby
	 * accepts before a LOB type must be used; confirm against the Derby
	 * reference manual.
	 */
	@Override
	public int getMaxVarcharCapacity() {
		return 32_700;
	}
	/**
	 * Default precision for {@code decimal}/{@code numeric} columns.
	 */
	@Override
	public int getDefaultDecimalPrecision() {
		//this is the maximum allowed in Derby
		return 31;
	}
	/**
	 * Derby has no dedicated national character types: this dialect maps
	 * {@code nchar}/{@code nvarchar}/{@code nclob} onto the plain character
	 * types (see {@link #columnType}), so nationalization is implicit.
	 */
	@Override
	public NationalizationSupport getNationalizationSupport() {
		return NationalizationSupport.IMPLICIT;
	}
	/**
	 * Default JDBC statement batch size used on Derby: 15.
	 */
	@Override
	public int getDefaultStatementBatchSize() {
		return 15;
	}
	/**
	 * 23 — presumably the binary mantissa precision of Derby's {@code real}
	 * type; TODO confirm against the Derby reference manual.
	 */
	@Override
	public int getFloatPrecision() {
		return 23;
	}
	/**
	 * 52 — presumably the binary mantissa precision of Derby's {@code double}
	 * type; TODO confirm against the Derby reference manual.
	 */
	@Override
	public int getDoublePrecision() {
		return 52;
	}
	/**
	 * 9 fractional-second digits — NOTE(review): presumably nanosecond
	 * timestamp precision on Derby; confirm against the Derby reference
	 * manual.
	 */
	@Override
	public int getDefaultTimestampPrecision() {
		return 9;
	}
	/**
	 * Registers Derby-specific SQL functions and emulations.
	 * <p>
	 * Derby lacks many common functions, so several are emulated
	 * ({@code chr}, {@code lpad}, {@code rpad}, {@code least},
	 * {@code greatest}, {@code overlay}) or mapped onto the functions
	 * Derby does provide (e.g. {@code power} via {@code exp}/{@code ln}).
	 */
	@Override
	public void initializeFunctionRegistry(FunctionContributions functionContributions) {
		super.initializeFunctionRegistry(functionContributions);
		final BasicTypeRegistry basicTypeRegistry = functionContributions.getTypeConfiguration().getBasicTypeRegistry();
		final BasicType<String> stringType = basicTypeRegistry.resolve( StandardBasicTypes.STRING );
		final DdlTypeRegistry ddlTypeRegistry = functionContributions.getTypeConfiguration().getDdlTypeRegistry();
		final CommonFunctionFactory functionFactory = new CommonFunctionFactory(functionContributions);
		// Derby needs an actual argument type for aggregates like SUM, AVG, MIN, MAX to determine the result type
		functionFactory.aggregates( this, SqlAstNodeRenderingMode.NO_PLAIN_PARAMETER );
		// Custom count() wired with '||' and the VARCHAR cast type name —
		// presumably used when rendering count(distinct ...) over multiple columns; see CountFunction
		functionContributions.getFunctionRegistry().register(
				"count",
				new CountFunction(
						this,
						functionContributions.getTypeConfiguration(),
						SqlAstNodeRenderingMode.NO_PLAIN_PARAMETER,
						"||",
						ddlTypeRegistry.getDescriptor( VARCHAR )
								.getCastTypeName( Size.nil(), stringType, ddlTypeRegistry ),
						true
				)
		);
		// AVG by default uses the input type, so we possibly need to cast the argument type, hence a special function
		functionFactory.avg_castingNonDoubleArguments( this, SqlAstNodeRenderingMode.DEFAULT );
		// Note that Derby does not have chr() / ascii() functions.
		// It does have a function named char(), but it's really a
		// sort of to_char() function.
		// We register an emulation instead, that can at least translate integer literals
		functionContributions.getFunctionRegistry().register(
				"chr",
				new ChrLiteralEmulation( functionContributions.getTypeConfiguration() )
		);
		// Functions Derby supports directly, or via straightforward renames
		functionFactory.concat_pipeOperator();
		functionFactory.cot();
		functionFactory.degrees();
		functionFactory.radians();
		functionFactory.log10();
		functionFactory.sinh();
		functionFactory.cosh();
		functionFactory.tanh();
		functionFactory.pi();
		functionFactory.rand();
		functionFactory.trim1();
		functionFactory.hourMinuteSecond();
		functionFactory.yearMonthDay();
		functionFactory.varPopSamp();
		functionFactory.stddevPopSamp();
		functionFactory.substring_substr();
		functionFactory.leftRight_substrLength();
		functionFactory.characterLength_length( SqlAstNodeRenderingMode.NO_PLAIN_PARAMETER );
		functionFactory.power_expLn();
		functionFactory.round_floor();
		functionFactory.trunc_floor();
		// length() is pattern-based since octet/bit lengths are derived from it
		functionFactory.octetLength_pattern( "length(?1)", SqlAstNodeRenderingMode.NO_PLAIN_PARAMETER );
		functionFactory.bitLength_pattern( "length(?1)*8", SqlAstNodeRenderingMode.NO_PLAIN_PARAMETER );
		// concat() needs explicit casts because Derby's '||' is picky about operand types
		functionContributions.getFunctionRegistry().register(
				"concat",
				new CastingConcatFunction(
						this,
						"||",
						true,
						SqlAstNodeRenderingMode.NO_PLAIN_PARAMETER,
						functionContributions.getTypeConfiguration()
				)
		);
		//no way I can see to pad with anything other than spaces
		functionContributions.getFunctionRegistry().register( "lpad", new DerbyLpadEmulation( functionContributions.getTypeConfiguration() ) );
		functionContributions.getFunctionRegistry().register( "rpad", new DerbyRpadEmulation( functionContributions.getTypeConfiguration() ) );
		// least/greatest emulated via case expressions
		functionContributions.getFunctionRegistry().register( "least", new CaseLeastGreatestEmulation( true ) );
		functionContributions.getFunctionRegistry().register( "greatest", new CaseLeastGreatestEmulation( false ) );
		functionContributions.getFunctionRegistry().register( "overlay", new InsertSubstringOverlayEmulation( functionContributions.getTypeConfiguration(), true ) );
	}
	/**
	 * Produces a Derby-specific SQL AST translator for rendering queries
	 * and mutation statements.
	 */
	@Override
	public SqlAstTranslatorFactory getSqlAstTranslatorFactory() {
		return new StandardSqlAstTranslatorFactory() {
			@Override
			protected <T extends JdbcOperation> SqlAstTranslator<T> buildTranslator(
					SessionFactoryImplementor sessionFactory, Statement statement) {
				return new DerbySqlAstTranslator<>( sessionFactory, statement );
			}
		};
	}
	/**
	 * Derby doesn't have an extract() function, and has
	 * no functions at all for calendaring, but we can
	 * emulate the most basic functionality of extract()
	 * using the functions it does have.
	 * <p>
	 * The only supported {@link TemporalUnit}s are:
	 * {@link TemporalUnit#YEAR},
	 * {@link TemporalUnit#MONTH},
	 * {@link TemporalUnit#DAY},
	 * {@link TemporalUnit#HOUR},
	 * {@link TemporalUnit#MINUTE},
	 * {@link TemporalUnit#SECOND} (along with
	 * {@link TemporalUnit#NANOSECOND},
	 * {@link TemporalUnit#DATE}, and
	 * {@link TemporalUnit#TIME}, which are desugared
	 * by the parser).
	 */
	@Override
	public String extractPattern(TemporalUnit unit) {
		switch (unit) {
			case DAY_OF_MONTH:
				return "day(?2)";
			case DAY_OF_YEAR:
				// Days since January 1st of the argument's year, plus one
				return "({fn timestampdiff(sql_tsi_day,date(char(year(?2),4)||'-01-01'),?2)}+1)";
			case DAY_OF_WEEK:
				// Use the approach as outlined here: https://stackoverflow.com/questions/36357013/day-of-week-from-seconds-since-epoch
				return "(mod(mod({fn timestampdiff(sql_tsi_day,{d '1970-01-01'},?2)}+4,7)+7,7)+1)";
			case WEEK:
				// Use the approach as outlined here: https://www.sqlservercentral.com/articles/a-simple-formula-to-calculate-the-iso-week-number
				// In SQL Server terms this is (DATEPART(dy,DATEADD(dd,DATEDIFF(dd,'17530101',@SomeDate)/7*7,'17530104'))+6)/7
				return "(({fn timestampdiff(sql_tsi_day,date(char(year(?2),4)||'-01-01'),{fn timestampadd(sql_tsi_day,{fn timestampdiff(sql_tsi_day,{d '1753-01-01'},?2)}/7*7,{d '1753-01-04'})})}+7)/7)";
			case QUARTER:
				// Months 1-3 map to 1, 4-6 to 2, and so on
				return "((month(?2)+2)/3)";
			case EPOCH:
				// Whole seconds since the Unix epoch
				return "{fn timestampdiff(sql_tsi_second,{ts '1970-01-01 00:00:00'},?2)}";
			default:
				// Other units map directly onto a same-named Derby function, e.g. year(?2)
				return "?1(?2)";
		}
	}
@Override
public String translateExtractField(TemporalUnit unit) {
switch (unit) {
case WEEK:
case DAY_OF_YEAR:
case DAY_OF_WEEK:
throw new UnsupportedOperationException("field type not supported on Derby: " + unit);
case DAY_OF_MONTH:
return "day";
default:
return super.translateExtractField(unit);
}
}
	/**
	 * Derby does have a real {@link Types#BOOLEAN}
	 * type, but it doesn't know how to cast to it. Worse,
	 * Derby makes us use the {@code double()} function to
	 * cast things to its floating point types.
	 */
	@Override
	public String castPattern(CastType from, CastType to) {
		switch ( to ) {
			case FLOAT:
				// No direct cast to real; go through the double() function first
				return "cast(double(?1) as real)";
			case DOUBLE:
				return "double(?1)";
			case STRING:
				// Derby madness http://db.apache.org/derby/docs/10.8/ref/rrefsqlj33562.html
				// With a nice rant: https://blog.jooq.org/2011/10/29/derby-casting-madness-the-sequel/
				// See https://issues.apache.org/jira/browse/DERBY-2072
				// Since numerics can't be cast to varchar directly, use char(254) i.e. with the maximum char capacity
				// as an intermediate type before converting to varchar
				switch ( from ) {
					case FLOAT:
					case DOUBLE:
						// Derby can't cast to char directly, but needs to be cast to decimal first...
						return "cast(trim(cast(cast(?1 as decimal("
								+ getDefaultDecimalPrecision() + ","
								+ BigDecimalJavaType.INSTANCE.getDefaultSqlScale( this, null )
								+ ")) as char(254))) as ?2)";
					case INTEGER:
					case LONG:
					case FIXED:
						// Integral types go straight through char(254), trimming the padding
						return "cast(trim(cast(?1 as char(254))) as ?2)";
					case DATE:
						// The maximum length of a date
						return "cast(?1 as varchar(10))";
					case TIME:
						// The maximum length of a time
						return "cast(?1 as varchar(8))";
					case TIMESTAMP:
						// The maximum length of a timestamp
						return "cast(?1 as varchar(30))";
				}
				// Other source types fall through to the standard pattern below
				break;
		}
		return super.castPattern( from, to );
	}
	/**
	 * Renders timestamp arithmetic via the JDBC escape
	 * {@code {fn timestampadd(...)}}. Nanosecond-resolution units are split
	 * into whole seconds plus a remainder because {@code sql_tsi_frac_second}
	 * takes a sub-second magnitude (the {@code mod(...,1000000000)} below).
	 */
	@Override
	public String timestampaddPattern(TemporalUnit unit, TemporalType temporalType, IntervalType intervalType) {
		switch (unit) {
			case NANOSECOND:
			case NATIVE:
				// Add whole seconds first, then the fractional-second remainder
				return "{fn timestampadd(sql_tsi_frac_second,mod(bigint(?2),1000000000),{fn timestampadd(sql_tsi_second,bigint((?2)/1000000000),?3)})}";
			default:
				final String addExpression = "{fn timestampadd(sql_tsi_?1,bigint(?2),?3)}";
				// Since timestampadd will always produce a TIMESTAMP, we have to cast back to the intended type
				return temporalType == TemporalType.TIMESTAMP
						? addExpression
						: "cast(" + addExpression + " as " + temporalType.name().toLowerCase( Locale.ROOT ) + ")" ;
		}
	}

	/**
	 * Renders timestamp differences via the JDBC escape
	 * {@code {fn timestampdiff(...)}}; nanosecond-resolution units map to
	 * {@code sql_tsi_frac_second}, all others substitute the unit name directly.
	 */
	@Override
	public String timestampdiffPattern(TemporalUnit unit, TemporalType fromTemporalType, TemporalType toTemporalType) {
		switch (unit) {
			case NANOSECOND:
			case NATIVE:
				return "{fn timestampdiff(sql_tsi_frac_second,?2,?3)}";
			default:
				return "{fn timestampdiff(sql_tsi_?1,?2,?3)}";
		}
	}
@Override
public void appendBooleanValueString(SqlAppender appender, boolean bool) {
appender.appendSql( bool );
}
@Override
public SequenceSupport getSequenceSupport() {
return DerbySequenceSupport.INSTANCE;
}
@Override
public String getQuerySequencesString() {
return "select sys.sysschemas.schemaname as sequence_schema,sys.syssequences.* from sys.syssequences left join sys.sysschemas on sys.syssequences.schemaid=sys.sysschemas.schemaid";
}
@Override
public SequenceInformationExtractor getSequenceInformationExtractor() {
return SequenceInformationExtractorDerbyDatabaseImpl.INSTANCE;
}
@Override
public String[] getDropSchemaCommand(String schemaName) {
return new String[] {"drop schema " + schemaName + " restrict"};
}
@Override
public String getSelectClauseNullString(int sqlType, TypeConfiguration typeConfiguration) {
return DB2Dialect.selectNullString( sqlType );
}
@Override
public boolean supportsCommentOn() {
//HHH-4531
return false;
}
@Override
public LockingSupport getLockingSupport() {
return LockingSupportSimple.NO_OUTER_JOIN;
}
@Override
protected LockingClauseStrategy buildLockingClauseStrategy(
PessimisticLockKind lockKind,
RowLockStrategy rowLockStrategy,
LockOptions lockOptions) {
return new DerbyLockingClauseStrategy( this, lockKind, rowLockStrategy, lockOptions );
}
@Override
public String getForUpdateString() {
return " for update with rs";
}
@Override
public String getWriteLockString(Timeout timeout) {
return " for update with rs";
}
@Override
public String getReadLockString(Timeout timeout) {
return " for read only with rs";
}
@Override
public String getWriteLockString(int timeout) {
return " for update with rs";
}
@Override
public String getReadLockString(int timeout) {
return " for read only with rs";
}
@Override
public boolean supportsExistsInSelect() {
//TODO: check this!
return false;
}
@Override
public boolean supportsCurrentTimestampSelection() {
return true;
}
@Override
public String getCurrentTimestampSelectString() {
return "values current timestamp";
}
@Override
public boolean isCurrentTimestampSelectStringCallable() {
return false;
}
@Override
public LimitHandler getLimitHandler() {
return limitHandler;
}
@Override
public IdentityColumnSupport getIdentityColumnSupport() {
return DB2IdentityColumnSupport.INSTANCE;
}
@Override
public boolean doesReadCommittedCauseWritersToBlockReaders() {
//TODO: check this
return true;
}
@Override
public boolean supportsResultSetPositionQueryMethodsOnForwardOnlyCursor() {
return false;
}
@Override
public boolean supportsTupleDistinctCounts() {
//checked on Derby 10.14
return false;
}
@Override
public boolean supportsOrderByInSubquery() {
// As of version 10.5 Derby supports OFFSET and FETCH as well as ORDER BY in subqueries
return true;
}
@Override
public boolean requiresCastForConcatenatingNonStrings() {
return true;
}
	/**
	 * Contributes Derby-specific JDBC type mappings on top of the defaults.
	 */
	@Override
	public void contributeTypes(TypeContributions typeContributions, ServiceRegistry serviceRegistry) {
		super.contributeTypes( typeContributions, serviceRegistry );
		final JdbcTypeRegistry jdbcTypeRegistry = typeContributions.getTypeConfiguration()
				.getJdbcTypeRegistry();
		// Map TIMESTAMP_WITH_TIMEZONE onto the plain timestamp descriptor
		// (presumably Derby has no native time-zone-aware type — confirm)
		jdbcTypeRegistry.addDescriptor( Types.TIMESTAMP_WITH_TIMEZONE, TimestampJdbcType.INSTANCE );
		// Derby requires a custom binder for binding untyped nulls that resolves the type through the statement
		typeContributions.contributeJdbcType( ObjectNullResolvingJdbcType.INSTANCE );
		// Until we remove StandardBasicTypes, we have to keep this
		typeContributions.contributeType(
				new JavaObjectType(
						ObjectNullResolvingJdbcType.INSTANCE,
						typeContributions.getTypeConfiguration()
								.getJavaTypeRegistry()
								.getDescriptor( Object.class )
				)
		);
	}
// Overridden informational metadata ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@Override
public boolean supportsLobValueChangePropagation() {
return false;
}
@Override
public boolean supportsUnboundedLobLocatorMaterialization() {
return false;
}
@Override
public int getInExpressionCountLimit() {
// Derby does not have a limit on the number of expressions/parameters per-se (it may, I just
// don't know). It does, however, have a limit on the size of the SQL text it will accept as a
// PreparedStatement; so let's limit this to a sensible value to avoid that.
return 512;
}
@Override
public ViolatedConstraintNameExtractor getViolatedConstraintNameExtractor() {
return new TemplatedViolatedConstraintNameExtractor( sqle -> {
final String sqlState = JdbcExceptionHelper.extractSqlState( sqle );
if ( sqlState != null ) {
switch ( sqlState ) {
case "23505":
return TemplatedViolatedConstraintNameExtractor.extractUsingTemplate(
"'", "'",
sqle.getMessage()
);
}
}
return null;
} );
}
@Override
public SQLExceptionConversionDelegate buildSQLExceptionConversionDelegate() {
return (sqlException, message, sql) -> {
final String sqlState = JdbcExceptionHelper.extractSqlState( sqlException );
// final int errorCode = JdbcExceptionHelper.extractErrorCode( sqlException );
final String constraintName;
if ( sqlState != null ) {
switch ( sqlState ) {
case "23505":
// Unique constraint violation
constraintName = getViolatedConstraintNameExtractor().extractConstraintName(sqlException);
return new ConstraintViolationException(
message,
sqlException,
sql,
ConstraintViolationException.ConstraintKind.UNIQUE,
constraintName
);
case "40XL1":
case "40XL2":
return new LockTimeoutException( message, sqlException, sql );
}
}
return null;
};
}
	/**
	 * Datetime format patterns cannot be rendered on Derby, so any use of
	 * the HQL {@code format()} function is rejected.
	 *
	 * @throws UnsupportedOperationException always
	 */
	@Override
	public void appendDatetimeFormat(SqlAppender appender, String format) {
		throw new UnsupportedOperationException("format() function not supported on Derby");
	}
@Override
protected void registerDefaultKeywords() {
super.registerDefaultKeywords();
registerKeyword( "ADD" );
registerKeyword( "ALL" );
registerKeyword( "ALLOCATE" );
registerKeyword( "ALTER" );
registerKeyword( "AND" );
registerKeyword( "ANY" );
registerKeyword( "ARE" );
registerKeyword( "AS" );
registerKeyword( "ASC" );
registerKeyword( "ASSERTION" );
registerKeyword( "AT" );
registerKeyword( "AUTHORIZATION" );
registerKeyword( "AVG" );
registerKeyword( "BEGIN" );
registerKeyword( "BETWEEN" );
registerKeyword( "BIT" );
registerKeyword( "BOOLEAN" );
registerKeyword( "BOTH" );
registerKeyword( "BY" );
registerKeyword( "CALL" );
registerKeyword( "CASCADE" );
registerKeyword( "CASCADED" );
registerKeyword( "CASE" );
registerKeyword( "CAST" );
registerKeyword( "CHAR" );
registerKeyword( "CHARACTER" );
registerKeyword( "CHECK" );
registerKeyword( "CLOSE" );
registerKeyword( "COLLATE" );
registerKeyword( "COLLATION" );
registerKeyword( "COLUMN" );
registerKeyword( "COMMIT" );
registerKeyword( "CONNECT" );
registerKeyword( "CONNECTION" );
registerKeyword( "CONSTRAINT" );
registerKeyword( "CONSTRAINTS" );
registerKeyword( "CONTINUE" );
registerKeyword( "CONVERT" );
registerKeyword( "CORRESPONDING" );
registerKeyword( "COUNT" );
registerKeyword( "CREATE" );
registerKeyword( "CURRENT" );
registerKeyword( "CURRENT_DATE" );
registerKeyword( "CURRENT_TIME" );
registerKeyword( "CURRENT_TIMESTAMP" );
registerKeyword( "CURRENT_USER" );
registerKeyword( "CURSOR" );
registerKeyword( "DEALLOCATE" );
registerKeyword( "DEC" );
registerKeyword( "DECIMAL" );
registerKeyword( "DECLARE" );
registerKeyword( "DEFERRABLE" );
registerKeyword( "DEFERRED" );
registerKeyword( "DELETE" );
registerKeyword( "DESC" );
registerKeyword( "DESCRIBE" );
registerKeyword( "DIAGNOSTICS" );
registerKeyword( "DISCONNECT" );
registerKeyword( "DISTINCT" );
registerKeyword( "DOUBLE" );
registerKeyword( "DROP" );
registerKeyword( "ELSE" );
registerKeyword( "END" );
registerKeyword( "ENDEXEC" );
registerKeyword( "ESCAPE" );
registerKeyword( "EXCEPT" );
registerKeyword( "EXCEPTION" );
registerKeyword( "EXEC" );
registerKeyword( "EXECUTE" );
registerKeyword( "EXISTS" );
registerKeyword( "EXPLAIN" );
registerKeyword( "EXTERNAL" );
registerKeyword( "FALSE" );
registerKeyword( "FETCH" );
registerKeyword( "FIRST" );
registerKeyword( "FLOAT" );
registerKeyword( "FOR" );
registerKeyword( "FOREIGN" );
registerKeyword( "FOUND" );
registerKeyword( "FROM" );
registerKeyword( "FULL" );
registerKeyword( "FUNCTION" );
registerKeyword( "GET" );
registerKeyword( "GET_CURRENT_CONNECTION" );
registerKeyword( "GLOBAL" );
registerKeyword( "GO" );
registerKeyword( "GOTO" );
registerKeyword( "GRANT" );
registerKeyword( "GROUP" );
registerKeyword( "HAVING" );
registerKeyword( "HOUR" );
registerKeyword( "IDENTITY" );
registerKeyword( "IMMEDIATE" );
registerKeyword( "IN" );
registerKeyword( "INDICATOR" );
registerKeyword( "INITIALLY" );
registerKeyword( "INNER" );
registerKeyword( "INOUT" );
registerKeyword( "INPUT" );
registerKeyword( "INSENSITIVE" );
registerKeyword( "INSERT" );
registerKeyword( "INT" );
registerKeyword( "INTEGER" );
registerKeyword( "INTERSECT" );
registerKeyword( "INTO" );
registerKeyword( "IS" );
registerKeyword( "ISOLATION" );
registerKeyword( "JOIN" );
registerKeyword( "KEY" );
registerKeyword( "LAST" );
registerKeyword( "LEFT" );
registerKeyword( "LIKE" );
registerKeyword( "LONGINT" );
registerKeyword( "LOWER" );
registerKeyword( "LTRIM" );
registerKeyword( "MATCH" );
registerKeyword( "MAX" );
registerKeyword( "MIN" );
registerKeyword( "MINUTE" );
registerKeyword( "NATIONAL" );
registerKeyword( "NATURAL" );
registerKeyword( "NCHAR" );
registerKeyword( "NVARCHAR" );
registerKeyword( "NEXT" );
registerKeyword( "NO" );
registerKeyword( "NOT" );
registerKeyword( "NULL" );
registerKeyword( "NULLIF" );
registerKeyword( "NUMERIC" );
registerKeyword( "OF" );
registerKeyword( "ON" );
registerKeyword( "ONLY" );
registerKeyword( "OPEN" );
registerKeyword( "OPTION" );
registerKeyword( "OR" );
registerKeyword( "ORDER" );
registerKeyword( "OUT" );
registerKeyword( "OUTER" );
registerKeyword( "OUTPUT" );
registerKeyword( "OVERLAPS" );
registerKeyword( "PAD" );
registerKeyword( "PARTIAL" );
registerKeyword( "PREPARE" );
registerKeyword( "PRESERVE" );
registerKeyword( "PRIMARY" );
registerKeyword( "PRIOR" );
registerKeyword( "PRIVILEGES" );
registerKeyword( "PROCEDURE" );
registerKeyword( "PUBLIC" );
registerKeyword( "READ" );
registerKeyword( "REAL" );
registerKeyword( "REFERENCES" );
registerKeyword( "RELATIVE" );
registerKeyword( "RESTRICT" );
registerKeyword( "REVOKE" );
registerKeyword( "RIGHT" );
registerKeyword( "ROLLBACK" );
registerKeyword( "ROWS" );
registerKeyword( "RTRIM" );
registerKeyword( "SCHEMA" );
registerKeyword( "SCROLL" );
registerKeyword( "SECOND" );
registerKeyword( "SELECT" );
registerKeyword( "SESSION_USER" );
registerKeyword( "SET" );
registerKeyword( "SMALLINT" );
registerKeyword( "SOME" );
registerKeyword( "SPACE" );
registerKeyword( "SQL" );
registerKeyword( "SQLCODE" );
registerKeyword( "SQLERROR" );
registerKeyword( "SQLSTATE" );
registerKeyword( "SUBSTR" );
registerKeyword( "SUBSTRING" );
registerKeyword( "SUM" );
registerKeyword( "SYSTEM_USER" );
registerKeyword( "TABLE" );
registerKeyword( "TEMPORARY" );
registerKeyword( "TIMEZONE_HOUR" );
registerKeyword( "TIMEZONE_MINUTE" );
registerKeyword( "TO" );
registerKeyword( "TRAILING" );
registerKeyword( "TRANSACTION" );
registerKeyword( "TRANSLATE" );
registerKeyword( "TRANSLATION" );
registerKeyword( "TRUE" );
registerKeyword( "UNION" );
registerKeyword( "UNIQUE" );
registerKeyword( "UNKNOWN" );
registerKeyword( "UPDATE" );
registerKeyword( "UPPER" );
registerKeyword( "USER" );
registerKeyword( "USING" );
registerKeyword( "VALUES" );
registerKeyword( "VARCHAR" );
registerKeyword( "VARYING" );
registerKeyword( "VIEW" );
registerKeyword( "WHENEVER" );
registerKeyword( "WHERE" );
registerKeyword( "WITH" );
registerKeyword( "WORK" );
registerKeyword( "WRITE" );
registerKeyword( "XML" );
registerKeyword( "XMLEXISTS" );
registerKeyword( "XMLPARSE" );
registerKeyword( "XMLSERIALIZE" );
registerKeyword( "YEAR" );
}
@Override
public SqmMultiTableMutationStrategy getFallbackSqmMutationStrategy(
EntityMappingType rootEntityDescriptor,
RuntimeModelCreationContext runtimeModelCreationContext) {
return new LocalTemporaryTableMutationStrategy( rootEntityDescriptor, runtimeModelCreationContext );
}
@Override
public SqmMultiTableInsertStrategy getFallbackSqmInsertStrategy(
EntityMappingType rootEntityDescriptor,
RuntimeModelCreationContext runtimeModelCreationContext) {
return new LocalTemporaryTableInsertStrategy( rootEntityDescriptor, runtimeModelCreationContext );
}
@Override
public TemporaryTableKind getSupportedTemporaryTableKind() {
return TemporaryTableKind.LOCAL;
}
@Override
public TemporaryTableStrategy getLocalTemporaryTableStrategy() {
return DerbyLocalTemporaryTableStrategy.INSTANCE;
}
@Override
public String getTemporaryTableCreateOptions() {
return DerbyLocalTemporaryTableStrategy.INSTANCE.getTemporaryTableCreateOptions();
}
@Override
public String getTemporaryTableCreateCommand() {
return DerbyLocalTemporaryTableStrategy.INSTANCE.getTemporaryTableCreateCommand();
}
@Override
public BeforeUseAction getTemporaryTableBeforeUseAction() {
return DerbyLocalTemporaryTableStrategy.INSTANCE.getTemporaryTableBeforeUseAction();
}
@Override
public boolean supportsTemporaryTablePrimaryKey() {
return DerbyLocalTemporaryTableStrategy.INSTANCE.supportsTemporaryTablePrimaryKey();
}
@Override
public boolean supportsPartitionBy() {
return false;
}
@Override
public boolean supportsWindowFunctions() {
// It seems at least the row_number function is supported as of 10.4
return true;
}
@Override
public boolean supportsValuesList() {
return true;
}
@Override
public IdentifierHelper buildIdentifierHelper(IdentifierHelperBuilder builder, DatabaseMetaData metadata)
throws SQLException {
builder.setAutoQuoteInitialUnderscore(true);
return super.buildIdentifierHelper(builder, metadata );
}
@Override
public UniqueDelegate getUniqueDelegate() {
return uniqueDelegate;
}
@Override
public DmlTargetColumnQualifierSupport getDmlTargetColumnQualifierSupport() {
return DmlTargetColumnQualifierSupport.TABLE_ALIAS;
}
@Override
public String getDual() {
return "(values 0)";
}
@Override
public String getFromDualForSelectOnly() {
return " from " + getDual() + " dual";
}
@Override
public boolean supportsJoinInMutationStatementSubquery() {
return false;
}
@Override
public boolean supportsRowValueConstructorSyntax() {
return false;
}
@Override
public boolean supportsWithClause() {
return false;
}
@Override
public boolean supportsRowValueConstructorSyntaxInQuantifiedPredicates() {
return false;
}
@Override
public boolean supportsRowValueConstructorSyntaxInInList() {
return false;
}
}
|
googleapis/google-cloud-java | 37,578 | java-orchestration-airflow/proto-google-cloud-orchestration-airflow-v1beta1/src/main/java/com/google/cloud/orchestration/airflow/service/v1beta1/ExecuteAirflowCommandResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/orchestration/airflow/service/v1beta1/environments.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.orchestration.airflow.service.v1beta1;
/**
*
*
* <pre>
* Response to ExecuteAirflowCommandRequest.
* </pre>
*
* Protobuf type {@code
* google.cloud.orchestration.airflow.service.v1beta1.ExecuteAirflowCommandResponse}
*/
public final class ExecuteAirflowCommandResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.orchestration.airflow.service.v1beta1.ExecuteAirflowCommandResponse)
ExecuteAirflowCommandResponseOrBuilder {
  // NOTE(review): protoc-generated code — comments below are annotations only;
  // do not hand-edit the logic, regeneration will discard changes.
  private static final long serialVersionUID = 0L;

  // Use ExecuteAirflowCommandResponse.newBuilder() to construct.
  private ExecuteAirflowCommandResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // Default instance: every string field starts out empty (proto3 defaults).
  private ExecuteAirflowCommandResponse() {
    executionId_ = "";
    pod_ = "";
    podNamespace_ = "";
    error_ = "";
  }

  // Called reflectively by the protobuf runtime to create fresh instances.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ExecuteAirflowCommandResponse();
  }

  // Message descriptor, defined in environments.proto.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.orchestration.airflow.service.v1beta1.EnvironmentsOuterClass
        .internal_static_google_cloud_orchestration_airflow_service_v1beta1_ExecuteAirflowCommandResponse_descriptor;
  }

  // Binds the descriptor's fields to this class and its Builder for reflection.
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.orchestration.airflow.service.v1beta1.EnvironmentsOuterClass
        .internal_static_google_cloud_orchestration_airflow_service_v1beta1_ExecuteAirflowCommandResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.orchestration.airflow.service.v1beta1.ExecuteAirflowCommandResponse
                .class,
            com.google.cloud.orchestration.airflow.service.v1beta1.ExecuteAirflowCommandResponse
                .Builder.class);
  }
  public static final int EXECUTION_ID_FIELD_NUMBER = 1;

  // Holds either a String or the raw ByteString from the wire; the getters
  // below convert lazily and cache the result back into the field.
  @SuppressWarnings("serial")
  private volatile java.lang.Object executionId_ = "";

  /**
   *
   *
   * <pre>
   * The unique ID of the command execution for polling.
   * </pre>
   *
   * <code>string execution_id = 1;</code>
   *
   * @return The executionId.
   */
  @java.lang.Override
  public java.lang.String getExecutionId() {
    java.lang.Object ref = executionId_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Decode the wire bytes once and cache the String form.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      executionId_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * The unique ID of the command execution for polling.
   * </pre>
   *
   * <code>string execution_id = 1;</code>
   *
   * @return The bytes for executionId.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getExecutionIdBytes() {
    java.lang.Object ref = executionId_;
    if (ref instanceof java.lang.String) {
      // Encode once and cache the ByteString form.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      executionId_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int POD_FIELD_NUMBER = 2;

  // Lazily-decoded String/ByteString, same scheme as executionId_.
  @SuppressWarnings("serial")
  private volatile java.lang.Object pod_ = "";

  /**
   *
   *
   * <pre>
   * The name of the pod where the command is executed.
   * </pre>
   *
   * <code>string pod = 2;</code>
   *
   * @return The pod.
   */
  @java.lang.Override
  public java.lang.String getPod() {
    java.lang.Object ref = pod_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      pod_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * The name of the pod where the command is executed.
   * </pre>
   *
   * <code>string pod = 2;</code>
   *
   * @return The bytes for pod.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getPodBytes() {
    java.lang.Object ref = pod_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      pod_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int POD_NAMESPACE_FIELD_NUMBER = 3;

  // Lazily-decoded String/ByteString, same scheme as executionId_.
  @SuppressWarnings("serial")
  private volatile java.lang.Object podNamespace_ = "";

  /**
   *
   *
   * <pre>
   * The namespace of the pod where the command is executed.
   * </pre>
   *
   * <code>string pod_namespace = 3;</code>
   *
   * @return The podNamespace.
   */
  @java.lang.Override
  public java.lang.String getPodNamespace() {
    java.lang.Object ref = podNamespace_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      podNamespace_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * The namespace of the pod where the command is executed.
   * </pre>
   *
   * <code>string pod_namespace = 3;</code>
   *
   * @return The bytes for podNamespace.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getPodNamespaceBytes() {
    java.lang.Object ref = podNamespace_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      podNamespace_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int ERROR_FIELD_NUMBER = 4;

  // Lazily-decoded String/ByteString, same scheme as executionId_.
  @SuppressWarnings("serial")
  private volatile java.lang.Object error_ = "";

  /**
   *
   *
   * <pre>
   * Error message. Empty if there was no error.
   * </pre>
   *
   * <code>string error = 4;</code>
   *
   * @return The error.
   */
  @java.lang.Override
  public java.lang.String getError() {
    java.lang.Object ref = error_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      error_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * Error message. Empty if there was no error.
   * </pre>
   *
   * <code>string error = 4;</code>
   *
   * @return The bytes for error.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getErrorBytes() {
    java.lang.Object ref = error_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      error_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // Memoized initialization state: -1 unknown, 0 false, 1 true.
  private byte memoizedIsInitialized = -1;

  // This message has no required fields, so it is always initialized.
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }

  // Serializes each string field only when non-empty (proto3 omits default
  // values), then any unknown fields preserved from parsing.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(executionId_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, executionId_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pod_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, pod_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(podNamespace_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, podNamespace_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(error_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 4, error_);
    }
    getUnknownFields().writeTo(output);
  }

  // Computes the serialized size, mirroring writeTo's field-by-field logic;
  // the result is memoized (-1 means not yet computed).
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(executionId_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, executionId_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pod_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, pod_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(podNamespace_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, podNamespace_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(error_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, error_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  // Field-wise equality over all four string fields plus unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj
        instanceof
        com.google.cloud.orchestration.airflow.service.v1beta1.ExecuteAirflowCommandResponse)) {
      return super.equals(obj);
    }
    com.google.cloud.orchestration.airflow.service.v1beta1.ExecuteAirflowCommandResponse other =
        (com.google.cloud.orchestration.airflow.service.v1beta1.ExecuteAirflowCommandResponse) obj;
    if (!getExecutionId().equals(other.getExecutionId())) return false;
    if (!getPod().equals(other.getPod())) return false;
    if (!getPodNamespace().equals(other.getPodNamespace())) return false;
    if (!getError().equals(other.getError())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  // Hash over the descriptor, each field (tagged by its field number), and
  // unknown fields; memoized with 0 as the "not yet computed" sentinel.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + EXECUTION_ID_FIELD_NUMBER;
    hash = (53 * hash) + getExecutionId().hashCode();
    hash = (37 * hash) + POD_FIELD_NUMBER;
    hash = (53 * hash) + getPod().hashCode();
    hash = (37 * hash) + POD_NAMESPACE_FIELD_NUMBER;
    hash = (53 * hash) + getPodNamespace().hashCode();
    hash = (37 * hash) + ERROR_FIELD_NUMBER;
    hash = (53 * hash) + getError().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points. Every overload delegates either to
  // PARSER directly (in-memory inputs) or to the GeneratedMessageV3 stream
  // helpers, which translate IOExceptions for stream-based inputs.
  public static com.google.cloud.orchestration.airflow.service.v1beta1.ExecuteAirflowCommandResponse
      parseFrom(java.nio.ByteBuffer data)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.orchestration.airflow.service.v1beta1.ExecuteAirflowCommandResponse
      parseFrom(
          java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.orchestration.airflow.service.v1beta1.ExecuteAirflowCommandResponse
      parseFrom(com.google.protobuf.ByteString data)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.orchestration.airflow.service.v1beta1.ExecuteAirflowCommandResponse
      parseFrom(
          com.google.protobuf.ByteString data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.orchestration.airflow.service.v1beta1.ExecuteAirflowCommandResponse
      parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.orchestration.airflow.service.v1beta1.ExecuteAirflowCommandResponse
      parseFrom(byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.orchestration.airflow.service.v1beta1.ExecuteAirflowCommandResponse
      parseFrom(java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.orchestration.airflow.service.v1beta1.ExecuteAirflowCommandResponse
      parseFrom(
          java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.orchestration.airflow.service.v1beta1.ExecuteAirflowCommandResponse
      parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.orchestration.airflow.service.v1beta1.ExecuteAirflowCommandResponse
      parseDelimitedFrom(
          java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.orchestration.airflow.service.v1beta1.ExecuteAirflowCommandResponse
      parseFrom(com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.orchestration.airflow.service.v1beta1.ExecuteAirflowCommandResponse
      parseFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Generated builder factories: newBuilder() starts from the default instance,
  // newBuilder(prototype) pre-populates from an existing message, and
  // toBuilder() avoids a needless mergeFrom when called on DEFAULT_INSTANCE.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(
      com.google.cloud.orchestration.airflow.service.v1beta1.ExecuteAirflowCommandResponse
          prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * Response to ExecuteAirflowCommandRequest.
   * </pre>
   *
   * Protobuf type {@code
   * google.cloud.orchestration.airflow.service.v1beta1.ExecuteAirflowCommandResponse}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.orchestration.airflow.service.v1beta1.ExecuteAirflowCommandResponse)
      com.google.cloud.orchestration.airflow.service.v1beta1
          .ExecuteAirflowCommandResponseOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.orchestration.airflow.service.v1beta1.EnvironmentsOuterClass
          .internal_static_google_cloud_orchestration_airflow_service_v1beta1_ExecuteAirflowCommandResponse_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.orchestration.airflow.service.v1beta1.EnvironmentsOuterClass
          .internal_static_google_cloud_orchestration_airflow_service_v1beta1_ExecuteAirflowCommandResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.orchestration.airflow.service.v1beta1.ExecuteAirflowCommandResponse
                  .class,
              com.google.cloud.orchestration.airflow.service.v1beta1.ExecuteAirflowCommandResponse
                  .Builder.class);
    }
    // Construct using
    // com.google.cloud.orchestration.airflow.service.v1beta1.ExecuteAirflowCommandResponse.newBuilder()
    private Builder() {}
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    // Resets all four string fields to their proto3 default ("") and clears the
    // has-bits in bitField0_.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      executionId_ = "";
      pod_ = "";
      podNamespace_ = "";
      error_ = "";
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.orchestration.airflow.service.v1beta1.EnvironmentsOuterClass
          .internal_static_google_cloud_orchestration_airflow_service_v1beta1_ExecuteAirflowCommandResponse_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.orchestration.airflow.service.v1beta1.ExecuteAirflowCommandResponse
        getDefaultInstanceForType() {
      return com.google.cloud.orchestration.airflow.service.v1beta1.ExecuteAirflowCommandResponse
          .getDefaultInstance();
    }
    @java.lang.Override
    public com.google.cloud.orchestration.airflow.service.v1beta1.ExecuteAirflowCommandResponse
        build() {
      com.google.cloud.orchestration.airflow.service.v1beta1.ExecuteAirflowCommandResponse result =
          buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.orchestration.airflow.service.v1beta1.ExecuteAirflowCommandResponse
        buildPartial() {
      com.google.cloud.orchestration.airflow.service.v1beta1.ExecuteAirflowCommandResponse result =
          new com.google.cloud.orchestration.airflow.service.v1beta1.ExecuteAirflowCommandResponse(
              this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Copies only the fields whose has-bit is set into the freshly constructed
    // message; unset fields keep the message's defaults.
    private void buildPartial0(
        com.google.cloud.orchestration.airflow.service.v1beta1.ExecuteAirflowCommandResponse
            result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.executionId_ = executionId_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.pod_ = pod_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.podNamespace_ = podNamespace_;
      }
      if (((from_bitField0_ & 0x00000008) != 0)) {
        result.error_ = error_;
      }
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other
          instanceof
          com.google.cloud.orchestration.airflow.service.v1beta1.ExecuteAirflowCommandResponse) {
        return mergeFrom(
            (com.google.cloud.orchestration.airflow.service.v1beta1.ExecuteAirflowCommandResponse)
                other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Proto3 merge semantics for strings: a field is copied from `other` only
    // when it is non-empty there, so defaults never overwrite set values.
    public Builder mergeFrom(
        com.google.cloud.orchestration.airflow.service.v1beta1.ExecuteAirflowCommandResponse
            other) {
      if (other
          == com.google.cloud.orchestration.airflow.service.v1beta1.ExecuteAirflowCommandResponse
              .getDefaultInstance()) return this;
      if (!other.getExecutionId().isEmpty()) {
        executionId_ = other.executionId_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (!other.getPod().isEmpty()) {
        pod_ = other.pod_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (!other.getPodNamespace().isEmpty()) {
        podNamespace_ = other.podNamespace_;
        bitField0_ |= 0x00000004;
        onChanged();
      }
      if (!other.getError().isEmpty()) {
        error_ = other.error_;
        bitField0_ |= 0x00000008;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    // Wire-format parse loop: tag values 10/18/26/34 are fields 1-4 with
    // length-delimited wire type; anything else is preserved as unknown fields.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                executionId_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                pod_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            case 26:
              {
                podNamespace_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            case 34:
              {
                error_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000008;
                break;
              } // case 34
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Has-bit mask: 0x1 execution_id, 0x2 pod, 0x4 pod_namespace, 0x8 error.
    private int bitField0_;
    private java.lang.Object executionId_ = "";
    /**
     *
     *
     * <pre>
     * The unique ID of the command execution for polling.
     * </pre>
     *
     * <code>string execution_id = 1;</code>
     *
     * @return The executionId.
     */
    public java.lang.String getExecutionId() {
      java.lang.Object ref = executionId_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        executionId_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * The unique ID of the command execution for polling.
     * </pre>
     *
     * <code>string execution_id = 1;</code>
     *
     * @return The bytes for executionId.
     */
    public com.google.protobuf.ByteString getExecutionIdBytes() {
      java.lang.Object ref = executionId_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        executionId_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * The unique ID of the command execution for polling.
     * </pre>
     *
     * <code>string execution_id = 1;</code>
     *
     * @param value The executionId to set.
     * @return This builder for chaining.
     */
    public Builder setExecutionId(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      executionId_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The unique ID of the command execution for polling.
     * </pre>
     *
     * <code>string execution_id = 1;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearExecutionId() {
      executionId_ = getDefaultInstance().getExecutionId();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The unique ID of the command execution for polling.
     * </pre>
     *
     * <code>string execution_id = 1;</code>
     *
     * @param value The bytes for executionId to set.
     * @return This builder for chaining.
     */
    public Builder setExecutionIdBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      executionId_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    private java.lang.Object pod_ = "";
    /**
     *
     *
     * <pre>
     * The name of the pod where the command is executed.
     * </pre>
     *
     * <code>string pod = 2;</code>
     *
     * @return The pod.
     */
    public java.lang.String getPod() {
      java.lang.Object ref = pod_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        pod_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * The name of the pod where the command is executed.
     * </pre>
     *
     * <code>string pod = 2;</code>
     *
     * @return The bytes for pod.
     */
    public com.google.protobuf.ByteString getPodBytes() {
      java.lang.Object ref = pod_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        pod_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * The name of the pod where the command is executed.
     * </pre>
     *
     * <code>string pod = 2;</code>
     *
     * @param value The pod to set.
     * @return This builder for chaining.
     */
    public Builder setPod(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      pod_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The name of the pod where the command is executed.
     * </pre>
     *
     * <code>string pod = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearPod() {
      pod_ = getDefaultInstance().getPod();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The name of the pod where the command is executed.
     * </pre>
     *
     * <code>string pod = 2;</code>
     *
     * @param value The bytes for pod to set.
     * @return This builder for chaining.
     */
    public Builder setPodBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      pod_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    private java.lang.Object podNamespace_ = "";
    /**
     *
     *
     * <pre>
     * The namespace of the pod where the command is executed.
     * </pre>
     *
     * <code>string pod_namespace = 3;</code>
     *
     * @return The podNamespace.
     */
    public java.lang.String getPodNamespace() {
      java.lang.Object ref = podNamespace_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        podNamespace_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * The namespace of the pod where the command is executed.
     * </pre>
     *
     * <code>string pod_namespace = 3;</code>
     *
     * @return The bytes for podNamespace.
     */
    public com.google.protobuf.ByteString getPodNamespaceBytes() {
      java.lang.Object ref = podNamespace_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        podNamespace_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * The namespace of the pod where the command is executed.
     * </pre>
     *
     * <code>string pod_namespace = 3;</code>
     *
     * @param value The podNamespace to set.
     * @return This builder for chaining.
     */
    public Builder setPodNamespace(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      podNamespace_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The namespace of the pod where the command is executed.
     * </pre>
     *
     * <code>string pod_namespace = 3;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearPodNamespace() {
      podNamespace_ = getDefaultInstance().getPodNamespace();
      bitField0_ = (bitField0_ & ~0x00000004);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The namespace of the pod where the command is executed.
     * </pre>
     *
     * <code>string pod_namespace = 3;</code>
     *
     * @param value The bytes for podNamespace to set.
     * @return This builder for chaining.
     */
    public Builder setPodNamespaceBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      podNamespace_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    private java.lang.Object error_ = "";
    /**
     *
     *
     * <pre>
     * Error message. Empty if there was no error.
     * </pre>
     *
     * <code>string error = 4;</code>
     *
     * @return The error.
     */
    public java.lang.String getError() {
      java.lang.Object ref = error_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        error_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Error message. Empty if there was no error.
     * </pre>
     *
     * <code>string error = 4;</code>
     *
     * @return The bytes for error.
     */
    public com.google.protobuf.ByteString getErrorBytes() {
      java.lang.Object ref = error_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        error_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Error message. Empty if there was no error.
     * </pre>
     *
     * <code>string error = 4;</code>
     *
     * @param value The error to set.
     * @return This builder for chaining.
     */
    public Builder setError(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      error_ = value;
      bitField0_ |= 0x00000008;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Error message. Empty if there was no error.
     * </pre>
     *
     * <code>string error = 4;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearError() {
      error_ = getDefaultInstance().getError();
      bitField0_ = (bitField0_ & ~0x00000008);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Error message. Empty if there was no error.
     * </pre>
     *
     * <code>string error = 4;</code>
     *
     * @param value The bytes for error to set.
     * @return This builder for chaining.
     */
    public Builder setErrorBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      error_ = value;
      bitField0_ |= 0x00000008;
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.cloud.orchestration.airflow.service.v1beta1.ExecuteAirflowCommandResponse)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.orchestration.airflow.service.v1beta1.ExecuteAirflowCommandResponse)
  // Singleton default instance: all fields at proto3 defaults; shared by
  // getDefaultInstance()/getDefaultInstanceForType() and toBuilder().
  private static final com.google.cloud.orchestration.airflow.service.v1beta1
          .ExecuteAirflowCommandResponse
      DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE =
        new com.google.cloud.orchestration.airflow.service.v1beta1.ExecuteAirflowCommandResponse();
  }
  public static com.google.cloud.orchestration.airflow.service.v1beta1.ExecuteAirflowCommandResponse
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Generated parser: builds via the Builder's wire-format mergeFrom and, on
  // failure, attaches the partially parsed message to the thrown
  // InvalidProtocolBufferException.
  private static final com.google.protobuf.Parser<ExecuteAirflowCommandResponse> PARSER =
      new com.google.protobuf.AbstractParser<ExecuteAirflowCommandResponse>() {
        @java.lang.Override
        public ExecuteAirflowCommandResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  // Accessors for the shared parser and default instance.
  public static com.google.protobuf.Parser<ExecuteAirflowCommandResponse> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<ExecuteAirflowCommandResponse> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.orchestration.airflow.service.v1beta1.ExecuteAirflowCommandResponse
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/dialogflow/cx/v3/intent.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.dialogflow.cx.v3;
/**
*
*
* <pre>
* The response message for
* [Intents.ListIntents][google.cloud.dialogflow.cx.v3.Intents.ListIntents].
* </pre>
*
* Protobuf type {@code google.cloud.dialogflow.cx.v3.ListIntentsResponse}
*/
public final class ListIntentsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.dialogflow.cx.v3.ListIntentsResponse)
ListIntentsResponseOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use ListIntentsResponse.newBuilder() to construct.
  private ListIntentsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // No-arg constructor used only for DEFAULT_INSTANCE / newInstance: fields
  // start at their proto3 defaults (empty list, empty token).
  private ListIntentsResponse() {
    intents_ = java.util.Collections.emptyList();
    nextPageToken_ = "";
  }
  // Runtime hook used by the protobuf library to allocate fresh instances.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ListIntentsResponse();
  }
  // Descriptor plumbing generated from google/cloud/dialogflow/cx/v3/intent.proto.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.dialogflow.cx.v3.IntentProto
        .internal_static_google_cloud_dialogflow_cx_v3_ListIntentsResponse_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.dialogflow.cx.v3.IntentProto
        .internal_static_google_cloud_dialogflow_cx_v3_ListIntentsResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.dialogflow.cx.v3.ListIntentsResponse.class,
            com.google.cloud.dialogflow.cx.v3.ListIntentsResponse.Builder.class);
  }
  // Field 1: repeated Intent intents. Backing list is never exposed mutably;
  // callers receive the internal (effectively immutable) list directly.
  public static final int INTENTS_FIELD_NUMBER = 1;
  @SuppressWarnings("serial")
  private java.util.List<com.google.cloud.dialogflow.cx.v3.Intent> intents_;
  /**
   *
   *
   * <pre>
   * The list of intents. There will be a maximum number of items returned based
   * on the page_size field in the request.
   * </pre>
   *
   * <code>repeated .google.cloud.dialogflow.cx.v3.Intent intents = 1;</code>
   */
  @java.lang.Override
  public java.util.List<com.google.cloud.dialogflow.cx.v3.Intent> getIntentsList() {
    return intents_;
  }
  /**
   *
   *
   * <pre>
   * The list of intents. There will be a maximum number of items returned based
   * on the page_size field in the request.
   * </pre>
   *
   * <code>repeated .google.cloud.dialogflow.cx.v3.Intent intents = 1;</code>
   */
  @java.lang.Override
  public java.util.List<? extends com.google.cloud.dialogflow.cx.v3.IntentOrBuilder>
      getIntentsOrBuilderList() {
    return intents_;
  }
  /**
   *
   *
   * <pre>
   * The list of intents. There will be a maximum number of items returned based
   * on the page_size field in the request.
   * </pre>
   *
   * <code>repeated .google.cloud.dialogflow.cx.v3.Intent intents = 1;</code>
   */
  @java.lang.Override
  public int getIntentsCount() {
    return intents_.size();
  }
  /**
   *
   *
   * <pre>
   * The list of intents. There will be a maximum number of items returned based
   * on the page_size field in the request.
   * </pre>
   *
   * <code>repeated .google.cloud.dialogflow.cx.v3.Intent intents = 1;</code>
   */
  @java.lang.Override
  public com.google.cloud.dialogflow.cx.v3.Intent getIntents(int index) {
    return intents_.get(index);
  }
  /**
   *
   *
   * <pre>
   * The list of intents. There will be a maximum number of items returned based
   * on the page_size field in the request.
   * </pre>
   *
   * <code>repeated .google.cloud.dialogflow.cx.v3.Intent intents = 1;</code>
   */
  @java.lang.Override
  public com.google.cloud.dialogflow.cx.v3.IntentOrBuilder getIntentsOrBuilder(int index) {
    return intents_.get(index);
  }
  // Field 2: string next_page_token. Stored as Object so it can hold either a
  // lazily decoded ByteString or the cached String; both getters normalize and
  // cache the requested representation.
  public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
  @SuppressWarnings("serial")
  private volatile java.lang.Object nextPageToken_ = "";
  /**
   *
   *
   * <pre>
   * Token to retrieve the next page of results, or empty if there are no more
   * results in the list.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The nextPageToken.
   */
  @java.lang.Override
  public java.lang.String getNextPageToken() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      nextPageToken_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Token to retrieve the next page of results, or empty if there are no more
   * results in the list.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The bytes for nextPageToken.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getNextPageTokenBytes() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      nextPageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // Memoized initialization check (-1 unknown, 0 false, 1 true); trivially
  // true here since the message has no required fields.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes in field-number order: repeated intents (field 1), then the
  // page token (field 2, skipped when empty per proto3), then unknown fields.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    for (int i = 0; i < intents_.size(); i++) {
      output.writeMessage(1, intents_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
    }
    getUnknownFields().writeTo(output);
  }
  // Computes and memoizes the wire size; mirrors writeTo() field-for-field.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    for (int i = 0; i < intents_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, intents_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Generated structural equality over intents, next_page_token, and unknown
  // fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.dialogflow.cx.v3.ListIntentsResponse)) {
      return super.equals(obj);
    }
    com.google.cloud.dialogflow.cx.v3.ListIntentsResponse other =
        (com.google.cloud.dialogflow.cx.v3.ListIntentsResponse) obj;
    if (!getIntentsList().equals(other.getIntentsList())) return false;
    if (!getNextPageToken().equals(other.getNextPageToken())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Generated hashCode, consistent with equals(); the repeated field only
  // contributes when non-empty. Memoized since the message is immutable.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getIntentsCount() > 0) {
      hash = (37 * hash) + INTENTS_FIELD_NUMBER;
      hash = (53 * hash) + getIntentsList().hashCode();
    }
    hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getNextPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points; in-memory overloads delegate to
  // PARSER, stream overloads go through the GeneratedMessageV3 helpers.
  public static com.google.cloud.dialogflow.cx.v3.ListIntentsResponse parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.dialogflow.cx.v3.ListIntentsResponse parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.dialogflow.cx.v3.ListIntentsResponse parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.dialogflow.cx.v3.ListIntentsResponse parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.dialogflow.cx.v3.ListIntentsResponse parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.dialogflow.cx.v3.ListIntentsResponse parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.dialogflow.cx.v3.ListIntentsResponse parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.dialogflow.cx.v3.ListIntentsResponse parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.dialogflow.cx.v3.ListIntentsResponse parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.dialogflow.cx.v3.ListIntentsResponse parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.dialogflow.cx.v3.ListIntentsResponse parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.dialogflow.cx.v3.ListIntentsResponse parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // --- Builder factory methods (generated). newBuilder() hands out a builder
  // --- seeded from the default instance; toBuilder() copies this message's state.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(
      com.google.cloud.dialogflow.cx.v3.ListIntentsResponse prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // Avoid an unnecessary merge when this IS the default instance.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * The response message for
   * [Intents.ListIntents][google.cloud.dialogflow.cx.v3.Intents.ListIntents].
   * </pre>
   *
   * Protobuf type {@code google.cloud.dialogflow.cx.v3.ListIntentsResponse}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.dialogflow.cx.v3.ListIntentsResponse)
      com.google.cloud.dialogflow.cx.v3.ListIntentsResponseOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.dialogflow.cx.v3.IntentProto
          .internal_static_google_cloud_dialogflow_cx_v3_ListIntentsResponse_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.dialogflow.cx.v3.IntentProto
          .internal_static_google_cloud_dialogflow_cx_v3_ListIntentsResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.dialogflow.cx.v3.ListIntentsResponse.class,
              com.google.cloud.dialogflow.cx.v3.ListIntentsResponse.Builder.class);
    }
    // Construct using com.google.cloud.dialogflow.cx.v3.ListIntentsResponse.newBuilder()
    private Builder() {}
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (intentsBuilder_ == null) {
        intents_ = java.util.Collections.emptyList();
      } else {
        intents_ = null;
        intentsBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      nextPageToken_ = "";
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.dialogflow.cx.v3.IntentProto
          .internal_static_google_cloud_dialogflow_cx_v3_ListIntentsResponse_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.dialogflow.cx.v3.ListIntentsResponse getDefaultInstanceForType() {
      return com.google.cloud.dialogflow.cx.v3.ListIntentsResponse.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.cloud.dialogflow.cx.v3.ListIntentsResponse build() {
      com.google.cloud.dialogflow.cx.v3.ListIntentsResponse result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.dialogflow.cx.v3.ListIntentsResponse buildPartial() {
      com.google.cloud.dialogflow.cx.v3.ListIntentsResponse result =
          new com.google.cloud.dialogflow.cx.v3.ListIntentsResponse(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Transfers the repeated intents field; the list is frozen (made unmodifiable)
    // so the built message can share it without copying.
    private void buildPartialRepeatedFields(
        com.google.cloud.dialogflow.cx.v3.ListIntentsResponse result) {
      if (intentsBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)) {
          intents_ = java.util.Collections.unmodifiableList(intents_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.intents_ = intents_;
      } else {
        result.intents_ = intentsBuilder_.build();
      }
    }
    private void buildPartial0(com.google.cloud.dialogflow.cx.v3.ListIntentsResponse result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.nextPageToken_ = nextPageToken_;
      }
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.dialogflow.cx.v3.ListIntentsResponse) {
        return mergeFrom((com.google.cloud.dialogflow.cx.v3.ListIntentsResponse) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    public Builder mergeFrom(com.google.cloud.dialogflow.cx.v3.ListIntentsResponse other) {
      if (other == com.google.cloud.dialogflow.cx.v3.ListIntentsResponse.getDefaultInstance())
        return this;
      if (intentsBuilder_ == null) {
        if (!other.intents_.isEmpty()) {
          if (intents_.isEmpty()) {
            // Our list is empty: share other's (immutable) list instead of copying.
            intents_ = other.intents_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureIntentsIsMutable();
            intents_.addAll(other.intents_);
          }
          onChanged();
        }
      } else {
        if (!other.intents_.isEmpty()) {
          if (intentsBuilder_.isEmpty()) {
            intentsBuilder_.dispose();
            intentsBuilder_ = null;
            intents_ = other.intents_;
            bitField0_ = (bitField0_ & ~0x00000001);
            intentsBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getIntentsFieldBuilder()
                    : null;
          } else {
            intentsBuilder_.addAllMessages(other.intents_);
          }
        }
      }
      if (!other.getNextPageToken().isEmpty()) {
        nextPageToken_ = other.nextPageToken_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                com.google.cloud.dialogflow.cx.v3.Intent m =
                    input.readMessage(
                        com.google.cloud.dialogflow.cx.v3.Intent.parser(), extensionRegistry);
                if (intentsBuilder_ == null) {
                  ensureIntentsIsMutable();
                  intents_.add(m);
                } else {
                  intentsBuilder_.addMessage(m);
                }
                break;
              } // case 10
            case 18:
              {
                nextPageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Bit 0x00000001 tracks whether intents_ is a private mutable copy;
    // bit 0x00000002 tracks whether nextPageToken_ has been explicitly set.
    private int bitField0_;
    private java.util.List<com.google.cloud.dialogflow.cx.v3.Intent> intents_ =
        java.util.Collections.emptyList();
    private void ensureIntentsIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        intents_ = new java.util.ArrayList<com.google.cloud.dialogflow.cx.v3.Intent>(intents_);
        bitField0_ |= 0x00000001;
      }
    }
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.dialogflow.cx.v3.Intent,
            com.google.cloud.dialogflow.cx.v3.Intent.Builder,
            com.google.cloud.dialogflow.cx.v3.IntentOrBuilder>
        intentsBuilder_;
    /**
     *
     *
     * <pre>
     * The list of intents. There will be a maximum number of items returned based
     * on the page_size field in the request.
     * </pre>
     *
     * <code>repeated .google.cloud.dialogflow.cx.v3.Intent intents = 1;</code>
     */
    public java.util.List<com.google.cloud.dialogflow.cx.v3.Intent> getIntentsList() {
      if (intentsBuilder_ == null) {
        return java.util.Collections.unmodifiableList(intents_);
      } else {
        return intentsBuilder_.getMessageList();
      }
    }
    /**
     *
     *
     * <pre>
     * The list of intents. There will be a maximum number of items returned based
     * on the page_size field in the request.
     * </pre>
     *
     * <code>repeated .google.cloud.dialogflow.cx.v3.Intent intents = 1;</code>
     */
    public int getIntentsCount() {
      if (intentsBuilder_ == null) {
        return intents_.size();
      } else {
        return intentsBuilder_.getCount();
      }
    }
    /**
     *
     *
     * <pre>
     * The list of intents. There will be a maximum number of items returned based
     * on the page_size field in the request.
     * </pre>
     *
     * <code>repeated .google.cloud.dialogflow.cx.v3.Intent intents = 1;</code>
     */
    public com.google.cloud.dialogflow.cx.v3.Intent getIntents(int index) {
      if (intentsBuilder_ == null) {
        return intents_.get(index);
      } else {
        return intentsBuilder_.getMessage(index);
      }
    }
    /**
     *
     *
     * <pre>
     * The list of intents. There will be a maximum number of items returned based
     * on the page_size field in the request.
     * </pre>
     *
     * <code>repeated .google.cloud.dialogflow.cx.v3.Intent intents = 1;</code>
     */
    public Builder setIntents(int index, com.google.cloud.dialogflow.cx.v3.Intent value) {
      if (intentsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureIntentsIsMutable();
        intents_.set(index, value);
        onChanged();
      } else {
        intentsBuilder_.setMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of intents. There will be a maximum number of items returned based
     * on the page_size field in the request.
     * </pre>
     *
     * <code>repeated .google.cloud.dialogflow.cx.v3.Intent intents = 1;</code>
     */
    public Builder setIntents(
        int index, com.google.cloud.dialogflow.cx.v3.Intent.Builder builderForValue) {
      if (intentsBuilder_ == null) {
        ensureIntentsIsMutable();
        intents_.set(index, builderForValue.build());
        onChanged();
      } else {
        intentsBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of intents. There will be a maximum number of items returned based
     * on the page_size field in the request.
     * </pre>
     *
     * <code>repeated .google.cloud.dialogflow.cx.v3.Intent intents = 1;</code>
     */
    public Builder addIntents(com.google.cloud.dialogflow.cx.v3.Intent value) {
      if (intentsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureIntentsIsMutable();
        intents_.add(value);
        onChanged();
      } else {
        intentsBuilder_.addMessage(value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of intents. There will be a maximum number of items returned based
     * on the page_size field in the request.
     * </pre>
     *
     * <code>repeated .google.cloud.dialogflow.cx.v3.Intent intents = 1;</code>
     */
    public Builder addIntents(int index, com.google.cloud.dialogflow.cx.v3.Intent value) {
      if (intentsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureIntentsIsMutable();
        intents_.add(index, value);
        onChanged();
      } else {
        intentsBuilder_.addMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of intents. There will be a maximum number of items returned based
     * on the page_size field in the request.
     * </pre>
     *
     * <code>repeated .google.cloud.dialogflow.cx.v3.Intent intents = 1;</code>
     */
    public Builder addIntents(com.google.cloud.dialogflow.cx.v3.Intent.Builder builderForValue) {
      if (intentsBuilder_ == null) {
        ensureIntentsIsMutable();
        intents_.add(builderForValue.build());
        onChanged();
      } else {
        intentsBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of intents. There will be a maximum number of items returned based
     * on the page_size field in the request.
     * </pre>
     *
     * <code>repeated .google.cloud.dialogflow.cx.v3.Intent intents = 1;</code>
     */
    public Builder addIntents(
        int index, com.google.cloud.dialogflow.cx.v3.Intent.Builder builderForValue) {
      if (intentsBuilder_ == null) {
        ensureIntentsIsMutable();
        intents_.add(index, builderForValue.build());
        onChanged();
      } else {
        intentsBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of intents. There will be a maximum number of items returned based
     * on the page_size field in the request.
     * </pre>
     *
     * <code>repeated .google.cloud.dialogflow.cx.v3.Intent intents = 1;</code>
     */
    public Builder addAllIntents(
        java.lang.Iterable<? extends com.google.cloud.dialogflow.cx.v3.Intent> values) {
      if (intentsBuilder_ == null) {
        ensureIntentsIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, intents_);
        onChanged();
      } else {
        intentsBuilder_.addAllMessages(values);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of intents. There will be a maximum number of items returned based
     * on the page_size field in the request.
     * </pre>
     *
     * <code>repeated .google.cloud.dialogflow.cx.v3.Intent intents = 1;</code>
     */
    public Builder clearIntents() {
      if (intentsBuilder_ == null) {
        intents_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        intentsBuilder_.clear();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of intents. There will be a maximum number of items returned based
     * on the page_size field in the request.
     * </pre>
     *
     * <code>repeated .google.cloud.dialogflow.cx.v3.Intent intents = 1;</code>
     */
    public Builder removeIntents(int index) {
      if (intentsBuilder_ == null) {
        ensureIntentsIsMutable();
        intents_.remove(index);
        onChanged();
      } else {
        intentsBuilder_.remove(index);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of intents. There will be a maximum number of items returned based
     * on the page_size field in the request.
     * </pre>
     *
     * <code>repeated .google.cloud.dialogflow.cx.v3.Intent intents = 1;</code>
     */
    public com.google.cloud.dialogflow.cx.v3.Intent.Builder getIntentsBuilder(int index) {
      return getIntentsFieldBuilder().getBuilder(index);
    }
    /**
     *
     *
     * <pre>
     * The list of intents. There will be a maximum number of items returned based
     * on the page_size field in the request.
     * </pre>
     *
     * <code>repeated .google.cloud.dialogflow.cx.v3.Intent intents = 1;</code>
     */
    public com.google.cloud.dialogflow.cx.v3.IntentOrBuilder getIntentsOrBuilder(int index) {
      if (intentsBuilder_ == null) {
        return intents_.get(index);
      } else {
        return intentsBuilder_.getMessageOrBuilder(index);
      }
    }
    /**
     *
     *
     * <pre>
     * The list of intents. There will be a maximum number of items returned based
     * on the page_size field in the request.
     * </pre>
     *
     * <code>repeated .google.cloud.dialogflow.cx.v3.Intent intents = 1;</code>
     */
    public java.util.List<? extends com.google.cloud.dialogflow.cx.v3.IntentOrBuilder>
        getIntentsOrBuilderList() {
      if (intentsBuilder_ != null) {
        return intentsBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(intents_);
      }
    }
    /**
     *
     *
     * <pre>
     * The list of intents. There will be a maximum number of items returned based
     * on the page_size field in the request.
     * </pre>
     *
     * <code>repeated .google.cloud.dialogflow.cx.v3.Intent intents = 1;</code>
     */
    public com.google.cloud.dialogflow.cx.v3.Intent.Builder addIntentsBuilder() {
      return getIntentsFieldBuilder()
          .addBuilder(com.google.cloud.dialogflow.cx.v3.Intent.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * The list of intents. There will be a maximum number of items returned based
     * on the page_size field in the request.
     * </pre>
     *
     * <code>repeated .google.cloud.dialogflow.cx.v3.Intent intents = 1;</code>
     */
    public com.google.cloud.dialogflow.cx.v3.Intent.Builder addIntentsBuilder(int index) {
      return getIntentsFieldBuilder()
          .addBuilder(index, com.google.cloud.dialogflow.cx.v3.Intent.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * The list of intents. There will be a maximum number of items returned based
     * on the page_size field in the request.
     * </pre>
     *
     * <code>repeated .google.cloud.dialogflow.cx.v3.Intent intents = 1;</code>
     */
    public java.util.List<com.google.cloud.dialogflow.cx.v3.Intent.Builder>
        getIntentsBuilderList() {
      return getIntentsFieldBuilder().getBuilderList();
    }
    // Lazily switches the repeated field from plain-list mode to field-builder
    // mode the first time a nested builder is requested.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.dialogflow.cx.v3.Intent,
            com.google.cloud.dialogflow.cx.v3.Intent.Builder,
            com.google.cloud.dialogflow.cx.v3.IntentOrBuilder>
        getIntentsFieldBuilder() {
      if (intentsBuilder_ == null) {
        intentsBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.cloud.dialogflow.cx.v3.Intent,
                com.google.cloud.dialogflow.cx.v3.Intent.Builder,
                com.google.cloud.dialogflow.cx.v3.IntentOrBuilder>(
                intents_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
        intents_ = null;
      }
      return intentsBuilder_;
    }
    // Stored as Object: either a String or a lazily-decoded ByteString.
    private java.lang.Object nextPageToken_ = "";
    /**
     *
     *
     * <pre>
     * Token to retrieve the next page of results, or empty if there are no more
     * results in the list.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The nextPageToken.
     */
    public java.lang.String getNextPageToken() {
      java.lang.Object ref = nextPageToken_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        nextPageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Token to retrieve the next page of results, or empty if there are no more
     * results in the list.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The bytes for nextPageToken.
     */
    public com.google.protobuf.ByteString getNextPageTokenBytes() {
      java.lang.Object ref = nextPageToken_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        nextPageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Token to retrieve the next page of results, or empty if there are no more
     * results in the list.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Token to retrieve the next page of results, or empty if there are no more
     * results in the list.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearNextPageToken() {
      nextPageToken_ = getDefaultInstance().getNextPageToken();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Token to retrieve the next page of results, or empty if there are no more
     * results in the list.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The bytes for nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.cloud.dialogflow.cx.v3.ListIntentsResponse)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.dialogflow.cx.v3.ListIntentsResponse)
  private static final com.google.cloud.dialogflow.cx.v3.ListIntentsResponse DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.dialogflow.cx.v3.ListIntentsResponse();
  }
  public static com.google.cloud.dialogflow.cx.v3.ListIntentsResponse getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Shared parser instance; each parsePartialFrom call builds a fresh Builder,
  // so reusing the instance across threads is safe.
  private static final com.google.protobuf.Parser<ListIntentsResponse> PARSER =
      new com.google.protobuf.AbstractParser<ListIntentsResponse>() {
        @java.lang.Override
        public ListIntentsResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            // Attach whatever was parsed so far as the unfinished message.
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<ListIntentsResponse> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<ListIntentsResponse> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.dialogflow.cx.v3.ListIntentsResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/derby | 36,985 | java/org.apache.derby.tests/org/apache/derbyTesting/functionTests/tests/lang/OffsetFetchNextTest.java | /*
Derby - Class org.apache.derbyTesting.functionTests.tests.lang.OffsetFetchNextTest
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.apache.derbyTesting.functionTests.tests.lang;
import java.sql.ParameterMetaData;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import java.sql.Types;
import junit.framework.Test;
import org.apache.derbyTesting.junit.BaseJDBCTestCase;
import org.apache.derbyTesting.junit.BaseTestSuite;
import org.apache.derbyTesting.junit.CleanDatabaseTestSetup;
import org.apache.derbyTesting.junit.JDBC;
import org.apache.derbyTesting.junit.TestConfiguration;
/**
* Test {@code <result offset clause>} and {@code <fetch first clause>}.
*/
public class OffsetFetchNextTest extends BaseJDBCTestCase {
    // SQLSTATE codes the negative tests expect Derby to raise.
    private final static String LANG_FORMAT_EXCEPTION = "22018";
    private final static String LANG_INTEGER_LITERAL_EXPECTED = "42X20";
    private final static String LANG_INVALID_ROW_COUNT_FIRST = "2201W";
    private final static String LANG_INVALID_ROW_COUNT_OFFSET = "2201X";
    private final static String LANG_MISSING_PARMS = "07000";
    private final static String LANG_SYNTAX_ERROR = "42X01";
    private final static String LANG_ROW_COUNT_OFFSET_FIRST_IS_NULL = "2201Z";

    // Placeholder character substituted with a concrete OFFSET/FETCH clause
    // by the test helpers.
    private final static String PERCENT_TOKEN = "%";

    // flavors of SQL Standard syntax (the "%" is replaced by a row count)
    private final static String FIRST_ROWS_ONLY = "fetch first % rows only";
    private final static String FIRST_ROW_ONLY = "fetch first % row only";
    private final static String NEXT_ROWS_ONLY = "fetch next % rows only";

    // variants: each statement is exercised both in SQL Standard syntax and
    // in the JDBC limit/offset escape syntax.
    private final static int SQL_STANDARD_VARIANT = 0;
    private final static int JDBC_VARIANT = SQL_STANDARD_VARIANT + 1;
    private final static int VARIANT_COUNT = JDBC_VARIANT + 1;
    /**
     * Creates a test case with the given name.
     *
     * @param name name of the test method to run
     */
    public OffsetFetchNextTest(String name) {
        super(name);
    }
public static Test suite() {
BaseTestSuite suite = new BaseTestSuite("OffsetFetchNextTest");
suite.addTest(
baseSuite("OffsetFetchNextTest:embedded"));
suite.addTest(
TestConfiguration.clientServerDecorator(
baseSuite("OffsetFetchNextTest:client")));
return suite;
}
    /**
     * Builds the base suite of all test cases in this class, wrapped in a
     * {@code CleanDatabaseTestSetup} that creates the schema objects once
     * before the suite runs.
     *
     * @param suiteName label for the suite (embedded vs. client)
     * @return the decorated suite
     */
    public static Test baseSuite(String suiteName) {
        return new CleanDatabaseTestSetup(
            new BaseTestSuite(OffsetFetchNextTest.class,
                              suiteName)) {
            @Override
            protected void decorateSQL(Statement s)
                    throws SQLException {
                createSchemaObjects(s);
            }
        };
    }
/**
* Creates tables used by the tests (never modified, we use rollback after
* changes).
*/
private static void createSchemaObjects(Statement st) throws SQLException
{
// T1 (no indexes)
st.executeUpdate("create table t1 (a int, b bigint)");
st.executeUpdate("insert into t1 (a, b) " +
"values (1,1), (1,2), (1,3), (1,4), (1,5)");
// T2 (primary key)
st.executeUpdate("create table t2 (a int primary key, b bigint)");
st.executeUpdate("insert into t2 (a, b) " +
"values (1,1), (2,1), (3,1), (4,1), (5,1)");
// T3 (primary key + secondary key)
st.executeUpdate("create table t3 (a int primary key, " +
" b bigint unique)");
st.executeUpdate("insert into t3 (a, b) " +
"values (1,1), (2,2), (3,3), (4,4), (5,5)");
}
    /**
     * Negative tests. Test various invalid OFFSET and FETCH NEXT clauses
     * and verify the expected SQLSTATE for each.
     *
     * @throws java.sql.SQLException
     */
    public void testErrors() throws SQLException
    {
        Statement st = createStatement();
        String stub = "select * from t1 %";

        // Wrong range in row count argument: OFFSET must be >= 0 (2201X),
        // FETCH FIRST must be >= 1 (2201W); a negated parameter marker is
        // a plain syntax error.
        vetStatement( st, LANG_INVALID_ROW_COUNT_OFFSET, stub, FIRST_ROWS_ONLY, "-1", null, null );
        vetStatement( st, LANG_SYNTAX_ERROR, stub, FIRST_ROWS_ONLY, "-?", null, null );
        assertStatementError(LANG_INVALID_ROW_COUNT_FIRST, st,
            "select * from t1 fetch first 0 rows only");
        vetStatement( st, LANG_INVALID_ROW_COUNT_FIRST, stub, FIRST_ROWS_ONLY, null, "-1", null );

        // Wrong type in row count argument: only integer literals allowed.
        vetStatement( st, LANG_INTEGER_LITERAL_EXPECTED, stub, FIRST_ROWS_ONLY, null, "3.14", null );

        // Wrong order of clauses: OFFSET must precede FETCH FIRST, and in
        // the JDBC escape syntax LIMIT must precede OFFSET.
        assertStatementError(LANG_SYNTAX_ERROR, st,
            "select * from t1 " +
            "fetch first 0 rows only offset 0 rows");
        assertStatementError(LANG_SYNTAX_ERROR, st,
            "select * from t1 { offset 0 limit 0 }");
    }
/**
* Positive tests. Check that the new keyword OFFSET introduced is not
* reserved so we don't risk breaking existing applications.
*
* @throws java.sql.SQLException
*/
public void testNewKeywordNonReserved() throws SQLException
{
setAutoCommit(false);
prepareStatement("select a,b as offset from t1 offset 0 rows");
prepareStatement("select a,b as limit from t1 offset 0 rows");
// Column and table correlation name usage
prepareStatement("select a,b from t1 as offset");
prepareStatement("select a,b from t1 as limit");
prepareStatement("select a,b offset from t1 offset");
prepareStatement("select a,b limit from t1 limit");
prepareStatement("select a,b offset from t1 offset +2 rows");
prepareStatement("select a offset,b from t1 offset ? rows");
prepareStatement("select offset.a, offset.b offset from t1 as offset offset ? rows");
prepareStatement("select limit.a, limit.b offset from t1 as limit offset ? rows");
// DERBY-4562
Statement s = createStatement();
s.executeUpdate("create table t4562(i int, offset int)");
ResultSet rs = s.executeQuery(
"select * from t4562 where i > 0 and offset + i < 0 offset 2 rows");
rs.next();
rs = s.executeQuery(
"select * from t4562 where i > 0 and offset - i < 0 offset 2 rows");
rs.next();
rs = s.executeQuery(
"select * from t4562 where i > 0 and offset * i < 0 offset 2 rows");
rs.next();
rs.close();
rollback();
}
/**
* Positive tests.
*
* @throws java.sql.SQLException
*/
public void testOffsetFetchFirstReadOnlyForwardOnlyRS() throws SQLException
{
Statement stm = createStatement();
/*
* offset 0 rows (a no-op)
*/
vetStatement
(
stm, null, "select a, b from t1%", FIRST_ROWS_ONLY, "0", null,
new String [][] { {"1","1"}, {"1","2"},{"1","3"}, {"1","4"},{"1","5"} }
);
vetStatement
(
stm, null, "select a,b from t2%", FIRST_ROWS_ONLY, "0", null,
new String [][] { {"1","1"}, {"2","1"},{"3","1"}, {"4","1"},{"5","1"} }
);
vetStatement
(
stm, null, "select a,b from t3%", FIRST_ROWS_ONLY, "0", null,
new String [][] { {"1","1"}, {"2","2"},{"3","3"}, {"4","4"},{"5","5"} }
);
/*
* offset 1 rows
*/
vetStatement
(
stm, null, "select a,b from t1%", FIRST_ROWS_ONLY, "1", null,
new String [][] { {"1","2"},{"1","3"}, {"1","4"},{"1","5"} }
);
vetStatement
(
stm, null, "select a,b from t2%", FIRST_ROWS_ONLY, "1", null,
new String [][] { {"2","1"},{"3","1"}, {"4","1"},{"5","1"} }
);
vetStatement
(
stm, null, "select a,b from t3%", FIRST_ROWS_ONLY, "1", null,
new String [][] { {"2","2"},{"3","3"}, {"4","4"},{"5","5"} }
);
/*
* offset 4 rows
*/
vetStatement
(
stm, null, "select a,b from t1%", FIRST_ROWS_ONLY, "4", null,
new String [][] { {"1","5"} }
);
vetStatement
(
stm, null, "select a,b from t2%", FIRST_ROWS_ONLY, "4", null,
new String [][] { {"5","1"} }
);
vetStatement
(
stm, null, "select a,b from t3%", FIRST_ROWS_ONLY, "4", null,
new String [][] { {"5","5"} }
);
/*
* offset 1 rows fetch 1 row. Use "next"/"rows" syntax
*/
vetStatement
(
stm, null, "select a,b from t1%", FIRST_ROWS_ONLY, "1", "1",
new String [][] { {"1","2"} }
);
vetStatement
(
stm, null, "select a,b from t2%", FIRST_ROWS_ONLY, "1", "1",
new String [][] { {"2","1"} }
);
vetStatement
(
stm, null, "select a,b from t3%", FIRST_ROWS_ONLY, "1", "1",
new String [][] { {"2","2"} }
);
/*
* offset 1 rows fetch so many rows we drain rs row. Use "first"/"row"
* syntax
*/
vetStatement
(
stm, null, "select a,b from t1%", FIRST_ROW_ONLY, "1", "10",
new String [][] { {"1","2"},{"1","3"}, {"1","4"},{"1","5"} }
);
vetStatement
(
stm, null, "select a,b from t2%", FIRST_ROW_ONLY, "1", "10",
new String [][] { {"2","1"},{"3","1"}, {"4","1"},{"5","1"} }
);
vetStatement
(
stm, null, "select a,b from t3%", FIRST_ROW_ONLY, "1", "10",
new String [][] { {"2","2"},{"3","3"}, {"4","4"},{"5","5"} }
);
/*
* offset so many rows that we see empty rs
*/
vetStatement
(
stm, null, "select a,b from t1%", FIRST_ROW_ONLY, "10", null,
new String [][] { }
);
vetStatement
(
stm, null, "select a,b from t2%", FIRST_ROW_ONLY, "10", null,
new String [][] { }
);
vetStatement
(
stm, null, "select a,b from t3%", FIRST_ROW_ONLY, "10", null,
new String [][] { }
);
/*
* fetch first/next row (no row count given)
*/
queryAndCheck(
stm,
"select a,b from t1 fetch first row only",
new String [][] {{"1","1"}});
queryAndCheck(
stm,
"select a,b from t2 fetch next row only",
new String [][] {{"1","1"}});
queryAndCheck(
stm,
"select a,b from t3 fetch next row only",
new String [][] {{"1","1"}});
/*
* Combine with order by asc
*/
queryAndCheck(
stm,
"select a,b from t1 order by b asc fetch first row only",
new String [][] {{"1","1"}});
queryAndCheck(
stm,
"select a,b from t2 order by a asc fetch next row only",
new String [][] {{"1","1"}});
queryAndCheck(
stm,
"select a,b from t3 order by a asc fetch next row only",
new String [][] {{"1","1"}});
/*
* Combine with order by desc.
*/
queryAndCheck(
stm,
// Note: use column b here since for t1 all column a values are the
// same and order can change after sorting, want unique row first
// in rs so we can test it.
"select a,b from t1 order by b desc fetch first row only",
new String [][] {{"1","5"}});
queryAndCheck(
stm,
"select a,b from t2 order by a desc fetch next row only",
new String [][] {{"5","1"}});
queryAndCheck(
stm,
"select a,b from t3 order by a desc fetch next row only",
new String [][] {{"5","5"}});
/*
* Combine with group by, order by.
*/
queryAndCheck(
stm,
"select max(a) from t1 group by b fetch first row only",
new String [][] {{"1"}});
vetStatement
(
stm, null, "select max(a) from t2 group by b %", FIRST_ROW_ONLY, "0", null,
new String [][] { {"5"} }
);
vetStatement
(
stm, null, "select max(a) from t3 group by b order by max(a) %", NEXT_ROWS_ONLY, null, "2",
new String [][] { {"1"},{"2"} }
);
/*
* Combine with union
*/
vetStatement
(
stm, null, "select * from t1 union all select * from t1 %", FIRST_ROW_ONLY, null, "2",
new String [][] { {"1","1"}, {"1","2"} }
);
/*
* Combine with join
*/
vetStatement
(
stm, null, "select t2.b, t3.b from t2,t3 where t2.a=t3.a %", FIRST_ROW_ONLY, null, "2",
new String [][] { {"1","1"}, {"1","2"} }
);
stm.close();
}
    /**
     * Positive tests: OFFSET/FETCH FIRST over an updatable, forward-only
     * result set. Verifies that updateRow() hits the correct underlying
     * table row when the cursor has been positioned past an offset. Each
     * variant of the statement (SQL Standard and JDBC escape syntax, as
     * produced by makeVariants) is tested and rolled back.
     *
     * @throws java.sql.SQLException
     */
    public void testOffsetFetchFirstUpdatableForwardOnlyRS() throws SQLException
    {
        Statement stm = createStatement(ResultSet.TYPE_FORWARD_ONLY,
                                        ResultSet.CONCUR_UPDATABLE);
        ResultSet rs;
        String[] variants;

        setAutoCommit(false);

        /*
         * offset 0 rows (a no-op), update a row and verify result
         */
        variants = makeVariants( "select * from t1 %", FIRST_ROWS_ONLY, "0", null );

        for (String variant : variants)
        {
            rs = stm.executeQuery( variant );
            rs.next();
            rs.next(); // at row 2
            rs.updateInt(1, -rs.getInt(1));
            rs.updateRow();
            rs.close();

            // Row 2's a column must now be negated; all others untouched.
            queryAndCheck(
                stm,
                "select a,b from t1",
                new String [][] {
                    {"1","1"}, {"-1","2"},{"1","3"}, {"1","4"},{"1","5"}});
            rollback();
        }

        /*
         * offset 1 rows, update a row and verify result
         */
        variants = makeVariants( "select * from t1 %", FIRST_ROWS_ONLY, "1", null );
        for ( String variant : variants )
        {
            rs = stm.executeQuery( variant );
            rs.next(); // at row 1, but row 2 of underlying rs
            rs.updateInt(1, -rs.getInt(1));
            rs.updateRow();
            rs.close();

            // Same expected result: the update must land on underlying row 2.
            queryAndCheck(
                stm,
                "select a,b from t1",
                new String [][] {
                    {"1","1"}, {"-1","2"},{"1","3"}, {"1","4"},{"1","5"}});
            rollback();
        }
        stm.close();
    }
/**
 * Positive tests with scrollable read-only result sets: verify that
 * navigation (next/previous/isBeforeFirst/isAfterLast) over an
 * OFFSET/FETCH FIRST result set is positioned on the expected rows.
 *
 * @throws java.sql.SQLException
 */
public void testOffsetFetchFirstReadOnlyScrollableRS() throws SQLException
{
    Statement stm = createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE,
                                    ResultSet.CONCUR_READ_ONLY);
    ResultSet rs;
    String[] variants;
    /*
     * offset 0 rows (a no-op): check that navigation lands on the
     * expected row. (The old comment spoke of updating a row, but this
     * result set is read-only.)
     */
    variants = makeVariants( "select * from t1 %", FIRST_ROWS_ONLY, "0", null );
    for ( String variant : variants )
    {
        rs = stm.executeQuery( variant );
        rs.next();
        rs.next(); // at row 2
        // assertEquals gives a useful message on failure, unlike the
        // bare assertTrue(x == y) it replaces.
        assertEquals(2, rs.getInt(2));
        rs.close();
    }
    /*
     * offset 1 rows, fetch 3 rows, check that we have the right ones
     */
    variants = makeVariants( "select * from t1 %", FIRST_ROWS_ONLY, "1", "3" );
    for ( String variant : variants )
    {
        rs = stm.executeQuery( variant );
        rs.next();
        rs.next(); // at row 2, but row 3 of underlying rs
        assertEquals(3, rs.getInt(2));
        // Go backwards and re-check the previous row
        rs.previous();
        assertEquals(2, rs.getInt(2));
        // Try some navigation and border conditions
        rs.previous();
        assertTrue(rs.isBeforeFirst());
        rs.next();
        rs.next();
        rs.next();
        rs.next();
        assertTrue(rs.isAfterLast());
        // Fix: close the result set (was previously left open until
        // the statement was closed).
        rs.close();
    }
    stm.close();
}
/**
 * Positive tests with SUR (Scrollable updatable result set): updates,
 * inserts and deletes through an OFFSET/FETCH FIRST result set must
 * affect the correct underlying rows.
 *
 * @throws java.sql.SQLException
 */
public void testOffsetFetchFirstUpdatableScrollableRS() throws SQLException
{
    Statement stm = createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE,
                                    ResultSet.CONCUR_UPDATABLE);
    ResultSet rs;
    String[] variants;
    // Run inside a transaction so each variant can be rolled back.
    setAutoCommit(false);
    /*
     * offset 0 rows (a no-op), update a row and verify result
     * also try the "for update" syntax so we see that it still works
     */
    variants = makeVariants( "select * from t1 % for update", FIRST_ROWS_ONLY, "0", null );
    for (String variant : variants)
    {
        rs = stm.executeQuery( variant );
        rs.next();
        rs.next(); // at row 2
        rs.updateInt(1, -rs.getInt(1));
        rs.updateRow();
        rs.close();
        queryAndCheck(
            stm,
            "select a,b from t1",
            new String [][] {
                {"1","1"}, {"-1","2"},{"1","3"}, {"1","4"},{"1","5"}});
        rollback();
    }
    /*
     * offset 1 rows, fetch 3 rows, update some rows and verify result
     */
    variants = makeVariants( "select * from t1 %", NEXT_ROWS_ONLY, "1", "3" );
    for ( String variant : variants )
    {
        rs = stm.executeQuery( variant );
        rs.next();
        rs.next(); // at row 2, but row 3 of underlying rs
        rs.updateInt(1, -rs.getInt(1));
        rs.updateRow();
        // Go backwards and update (fixed typo: was "backbards")
        rs.previous();
        rs.updateInt(1, -rs.getInt(1));
        rs.updateRow();
        // Try some navigation and border conditions
        rs.previous();
        assertTrue(rs.isBeforeFirst());
        rs.next();
        rs.next();
        rs.next();
        rs.next();
        assertTrue(rs.isAfterLast());
        // Insert a row
        rs.moveToInsertRow();
        rs.updateInt(1,42);
        rs.updateInt(2,42);
        rs.insertRow();
        // Delete a row
        rs.previous();
        rs.deleteRow();
        // .. and see that a hole is left in its place
        rs.previous();
        rs.next();
        assertTrue(rs.rowDeleted());
        rs.close();
        // Underlying rows 2 and 3 were negated, row 4 deleted and
        // (42,42) inserted.
        queryAndCheck(
            stm,
            "select a,b from t1",
            new String [][] {
                {"1","1"}, {"-1","2"},{"-1","3"},{"1","5"},{"42","42"}});
        rollback();
    }
    // Test with projection: the where clause keeps rows with b > a + 1,
    // and offset 1 skips the first of them.
    variants = makeVariants( "select * from t1 where a + 1 < b%", NEXT_ROWS_ONLY, "1", null );
    for (String variant : variants)
    {
        rs = stm.executeQuery( variant );
        // should yield 2 rows
        rs.absolute(2);
        assertTrue(rs.getInt(2) == 5);
        rs.updateInt(2, -5);
        rs.updateRow();
        rs.close();
        queryAndCheck(
            stm,
            "select a,b from t1",
            new String [][] {
                {"1","1"}, {"1","2"},{"1","3"},{"1","4"},{"1","-5"}});
        rollback();
    }
    stm.close();
}
/**
 * VALUES statements combined with FETCH FIRST / OFFSET: fetching the
 * first row of a single-row VALUES yields that row, while offsetting
 * past it yields an empty result.
 *
 * @throws java.sql.SQLException
 */
public void testValues() throws SQLException
{
    Statement stm = createStatement();

    // Fetch first row of "values 4": the single row survives.
    vetStatement(stm, null, "values 4%", FIRST_ROW_ONLY, null, "2",
                 new String[][] { {"4"} });

    // Offset 1 skips the only row, so nothing is returned.
    vetStatement(stm, null, "values 4%", FIRST_ROW_ONLY, "1", null,
                 new String[][] {});

    stm.close();
}
/**
 * Positive tests: result set metadata (column names and JDBC types) is
 * unaffected by the OFFSET/FETCH FIRST (or JDBC limit/offset) clause.
 *
 * @throws java.sql.SQLException
 */
public void testMetadata() throws SQLException
{
    Statement stm = createStatement();
    ResultSet rs;
    String[] variants;
    variants = makeVariants( "select * from t1%", NEXT_ROWS_ONLY, "1", null );
    for (String variant : variants)
    {
        rs = stm.executeQuery( variant );
        ResultSetMetaData rsmd= rs.getMetaData();
        int cnt = rsmd.getColumnCount();
        // Expected column names and JDBC types of t1(a int, b bigint).
        String[] cols = new String[]{"A","B"};
        int[] types = {Types.INTEGER, Types.BIGINT};
        for (int i=1; i <= cnt; i++) {
            // assertEquals reports actual vs. expected on failure,
            // unlike the bare assertTrue checks it replaces.
            assertEquals(cols[i-1], rsmd.getColumnName(i));
            assertEquals(types[i-1], rsmd.getColumnType(i));
        }
        rs.close();
    }
    stm.close();
}
/**
 * Test that the runtime statistics show correct traces of the offset
 * filtering: with offset 2 on a 5-row table, the plan must report 3
 * rows returned (5 seen, 2 filtered away).
 *
 * @throws java.sql.SQLException
 */
public void testRunTimeStatistics() throws SQLException
{
    Statement stm = createStatement();
    ResultSet rs;
    String[] variants;
    variants = makeVariants( "select a,b from t1%", NEXT_ROWS_ONLY, "2", null );
    for (String variant : variants)
    {
        stm.executeUpdate( "call syscs_util.syscs_set_runtimestatistics(1)" );
        queryAndCheck(
            stm,
            variant,
            new String [][] {
                {"1","3"}, {"1","4"},{"1","5"}});
        stm.executeUpdate( "call syscs_util.syscs_set_runtimestatistics(0)" );
        rs = stm.executeQuery( "values syscs_util.syscs_get_runtimestatistics()" );
        rs.next();
        String plan = rs.getString(1);
        // Verify that the plan shows the filtering. Use contains() with a
        // diagnostic message instead of the old bare indexOf() != -1.
        String expectedFragment =
            "Row Count (1):\n" +
            "Number of opens = 1\n" +
            "Rows seen = 3\n" +
            "Rows filtered = 2";
        assertTrue("Expected filtering trace not found in plan: " + plan,
                   plan.contains(expectedFragment));
        rs.close();
    }
    stm.close();
}
/**
 * Test OFFSET/FETCH FIRST against a bigger table (100,000 rows),
 * exercising offsets that skip almost all rows.
 *
 * @throws java.sql.SQLException
 */
public void testBigTable() throws SQLException
{
    Statement stm = createStatement();
    setAutoCommit(false);
    stm.executeUpdate("declare global temporary table session.t (i int) " +
                      "on commit preserve rows not logged");
    PreparedStatement ps =
        prepareStatement("insert into session.t values ?");
    for (int i=1; i <= 100000; i++) {
        ps.setInt(1, i);
        ps.executeUpdate();
        // Commit in batches to keep the transaction log small.
        if (i % 10000 == 0) {
            commit();
        }
    }
    // Fix: the PreparedStatement was previously never closed.
    ps.close();
    queryAndCheck(
        stm,
        "select count(*) from session.t",
        new String [][] {
            {"100000"}});
    // Skip all but the last row.
    vetStatement
        (
         stm, null, "select i from session.t%", FIRST_ROWS_ONLY, "99999", null,
         new String [][] { {"100000"} }
         );
    stm.executeUpdate("drop table session.t");
    stm.close();
}
/**
 * Test that the values of offset and fetch first are not forgotten if
 * a {@code PreparedStatement} is executed multiple times (DERBY-4212).
 *
 * @throws java.sql.SQLException
 */
public void testRepeatedExecution() throws SQLException
{
    PreparedStatement ps;
    String[] variants;
    variants = makeVariants( "select * from t1 order by b%", NEXT_ROWS_ONLY, "2", "2" );
    for (String variant : variants)
    {
        ps = prepareStatement( variant );
        String[][] expected = {{"1", "3"}, {"1", "4"}};
        // Execute repeatedly: every run must return the same rows.
        for (int i = 0; i < 10; i++) {
            JDBC.assertFullResultSet(ps.executeQuery(), expected);
        }
        // Fix: close each prepared statement instead of leaking it.
        ps.close();
    }
}
/**
 * Test dynamic (?) arguments for OFFSET and FETCH FIRST: range and type
 * errors, repeated execution with changing values, mixing dynamic and
 * literal clauses, mixing with other dynamic arguments, and NULLs.
 *
 * @throws java.sql.SQLException
 */
public void testDynamicArgs() throws SQLException
{
    PreparedStatement ps;
    String[] variants;
    String[][] expected = null;
    // Check look-ahead also for ? in grammar since offset is not reserved
    variants = makeVariants( "select * from t1%", NEXT_ROWS_ONLY, "?", null );
    for (String variant : variants)
    {
        // Only preparing needs to succeed here; never executed.
        ps = prepareStatement( variant );
    }
    variants = makeVariants( "select * from t1 order by b%", NEXT_ROWS_ONLY, "?", "?" );
    for ( int j = 0; j < variants.length; j++ )
    {
        // SQL Standard and JDBC limit/offset parameter orders are different
        int offsetParam = ( j == SQL_STANDARD_VARIANT ) ? 1 : 2;
        int fetchParam = ( j == SQL_STANDARD_VARIANT ) ? 2 : 1;
        expected = new String[][] {{"1", "3"}, {"1", "4"}};
        ps = prepareStatement( variants[ j ] );
        // Check range errors
        // Only one of the two parameters set: execution must fail.
        ps.setInt( offsetParam, 0 );
        assertPreparedStatementError(LANG_MISSING_PARMS, ps);
        // Negative offset is rejected at execution time.
        ps.setInt( offsetParam, -1 );
        ps.setInt( fetchParam, 2 );
        assertPreparedStatementError(LANG_INVALID_ROW_COUNT_OFFSET, ps);
        // Invalid fetch count: 0 for SQL Standard, -1 for JDBC syntax.
        ps.setInt( offsetParam, 0 );
        ps.setInt( fetchParam, ( j == SQL_STANDARD_VARIANT ) ? 0 : -1 );
        assertPreparedStatementError(LANG_INVALID_ROW_COUNT_FIRST, ps);
        // Check non-integer values
        // NOTE(review): there is no fail() after setString, so the test
        // passes even if no exception is thrown — presumably because the
        // conversion error may surface here or at execute time depending
        // on the driver; confirm before tightening.
        try {
            ps.setString( offsetParam, "aaa");
        } catch (SQLException e) {
            assertSQLState(LANG_FORMAT_EXCEPTION, e);
        }
        try {
            ps.setString( fetchParam, "aaa");
        } catch (SQLException e) {
            assertSQLState(LANG_FORMAT_EXCEPTION, e);
        }
        // A normal case: execute twice to exercise re-execution.
        for (int i = 0; i < 2; i++) {
            ps.setInt( offsetParam,2 );
            ps.setInt( fetchParam,2 );
            JDBC.assertFullResultSet(ps.executeQuery(), expected);
        }
        // Now, note that since we now have different values for offset and
        // fetch first, we also exercise reusing the result set for this
        // prepared statement (i.e. the values are computed at execution time,
        // not at result set generation time). Try long value for change.
        ps.setLong( offsetParam, 1L );
        ps.setInt( fetchParam, 3 );
        expected = new String[][]{{"1", "2"}, {"1", "3"}, {"1", "4"}};
        JDBC.assertFullResultSet(ps.executeQuery(), expected);
        // Try a large number: offset far beyond the table yields no rows.
        ps.setLong( offsetParam, Integer.MAX_VALUE * 2L );
        ps.setInt( fetchParam, 5 );
        JDBC.assertEmpty(ps.executeQuery());
    }
    // Mix of prepared and not: dynamic offset, literal fetch count.
    variants = makeVariants( "select * from t1 order by b%", NEXT_ROWS_ONLY, "?", "3" );
    for (String variant : variants)
    {
        ps = prepareStatement( variant );
        ps.setLong(1, 1L);
        // 'expected' still holds {{1,2},{1,3},{1,4}} from the loop above.
        JDBC.assertFullResultSet(ps.executeQuery(), expected);
    }
    // Literal offset, dynamic fetch count.
    variants = makeVariants( "select * from t1 order by b%", NEXT_ROWS_ONLY, "4", "?" );
    for (String variant : variants)
    {
        ps = prepareStatement( variant );
        ps.setLong(1, 1L);
        JDBC.assertFullResultSet(ps.executeQuery(), new String[][]{{"1", "5"}});
    }
    // Mix of other dyn args and ours:
    variants = makeVariants( "select * from t1 where a = ? order by b%", NEXT_ROWS_ONLY, "?", "3" );
    for (String variant : variants)
    {
        ps = prepareStatement( variant );
        ps.setInt(1, 1);
        ps.setLong(2, 1L);
        JDBC.assertFullResultSet(ps.executeQuery(), expected);
    }
    variants = makeVariants( "select * from t1 where a = ? order by b%", NEXT_ROWS_ONLY, "1", "?" );
    for (String variant : variants)
    {
        ps = prepareStatement( variant );
        ps.setInt(1, 1);
        ps.setLong(2, 2L);
        expected = new String[][]{{"1", "2"}, {"1", "3"}};
        JDBC.assertFullResultSet(ps.executeQuery(), expected);
    }
    // NULLs not allowed (Note: parameter metadata says "isNullable" for
    // all ? args in Derby...)
    variants = makeVariants( "select * from t1 order by b%", NEXT_ROWS_ONLY, "?", "?" );
    for ( int i = 0; i < variants.length; i++ )
    {
        ps = prepareStatement( variants[ i ] );
        int offsetParam = ( i == SQL_STANDARD_VARIANT ) ? 1 : 2;
        int fetchParam = ( i == SQL_STANDARD_VARIANT ) ? 2 : 1;
        ps.setNull( offsetParam, Types.BIGINT );
        ps.setInt( fetchParam, 2 );
        assertPreparedStatementError(LANG_ROW_COUNT_OFFSET_FIRST_IS_NULL, ps);
        ps.setInt( offsetParam,1 );
        ps.setNull( fetchParam, Types.BIGINT );
        assertPreparedStatementError(LANG_ROW_COUNT_OFFSET_FIRST_IS_NULL, ps);
        ps.close();
    }
}
/**
 * Check parameter metadata of dynamic OFFSET/FETCH arguments: the two
 * clause parameters report BIGINT, and all ? args report nullable.
 *
 * @throws java.sql.SQLException
 */
public void testDynamicArgsMetaData() throws SQLException
{
    // getParameterMetaData() is not part of JSR169, so skip there.
    if (JDBC.vmSupportsJSR169()) {
        return;
    }
    String[] queries = makeVariants(
        "select * from t1 where a = ? order by b%", NEXT_ROWS_ONLY, "?", "?" );
    for (String query : queries)
    {
        PreparedStatement stmt = prepareStatement( query );
        ParameterMetaData md = stmt.getParameterMetaData();
        // Param 1 is the predicate arg; 2 and 3 are offset/fetch (BIGINT).
        int[] wantedTypes = { Types.INTEGER, Types.BIGINT, Types.BIGINT };
        for (int param = 1; param <= 3; param++) {
            assertEquals("Unexpected parameter type",
                         wantedTypes[param - 1], md.getParameterType(param));
            // All ? args report nullable, cf. logic in ParameterNode.setType.
            assertEquals("Derby ? args are nullable",
                         ParameterMetaData.parameterNullable,
                         md.isNullable(param));
        }
        stmt.close();
    }
}
/**
 * Additional corner cases in the JDBC limit/offset escape syntax.
 *
 * @throws java.sql.SQLException
 */
public void testJDBCLimitOffset() throws SQLException
{
    // LIMIT 0 is allowed. It means: everything from the OFFSET forward.
    PreparedStatement stmt =
        prepareStatement( "select a from t2 order by a { limit ? }" );
    stmt.setInt( 1, 0 );
    JDBC.assertFullResultSet(
        stmt.executeQuery(),
        new String[][] { { "1" }, { "2" }, { "3" }, { "4" }, { "5" } });
    stmt.close();

    // LIMIT 0 with an explicit OFFSET: skip 3 rows, return the rest.
    stmt = prepareStatement( "select a from t2 order by a { limit ? offset 3 }" );
    stmt.setInt( 1, 0 );
    JDBC.assertFullResultSet(
        stmt.executeQuery(),
        new String[][] { { "4" }, { "5" } });
    stmt.close();

    // JDBC escape syntax and SQL Standard syntax may be mixed freely.
    stmt = prepareStatement
        (
         "select t.a from\n" +
         "( select * from t2 order by a { limit 3 offset 1 } ) t,\n" +
         "( select * from t3 order by a offset 2 rows fetch next 10 rows only ) s\n" +
         "where t.a = s.a order by t.a"
         );
    JDBC.assertFullResultSet(
        stmt.executeQuery(),
        new String[][] { { "3" }, { "4" } });
    stmt.close();
}
/**
 * Run a statement in both the SQL Standard and the JDBC limit/offset
 * variant, verifying that each one either raises the expected error or
 * produces the expected rows. The stub contains a % marker where the
 * offset/fetch-first (resp. limit/offset) clause is spliced in.
 *
 * @param stmt statement used to execute the variants
 * @param sqlState expected SQLState, or null if success is expected
 * @param stub statement text with a % placeholder
 * @param fetchFormat fetch-first clause template (SQL Standard variant)
 * @param offset offset value as text, or null for none
 * @param fetchFirst fetch count as text, or null for none
 * @param expectedResults rows expected on success (ignored on error)
 */
private void vetStatement
    ( Statement stmt, String sqlState, String stub, String fetchFormat, String offset, String fetchFirst, String[][] expectedResults )
    throws SQLException
{
    for (String sql : makeVariants( stub, fetchFormat, offset, fetchFirst ))
    {
        if ( sqlState == null )
        {
            queryAndCheck( stmt, sql, expectedResults );
        }
        else
        {
            assertStatementError( sqlState, stmt, sql );
        }
    }
}
/**
 * Build the SQL Standard and JDBC limit/offset variants of a stub
 * statement, plugging in the given offset and fetch count.
 *
 * @return array indexed by SQL_STANDARD_VARIANT / JDBC_VARIANT
 */
private String[] makeVariants
    ( String stub, String fetchFormat, String offset, String fetchFirst )
{
    String[] variants = new String[ VARIANT_COUNT ];
    variants[ SQL_STANDARD_VARIANT ] =
        makeSQLStandardText( stub, fetchFormat, offset, fetchFirst );
    variants[ JDBC_VARIANT ] =
        makeJDBCText( stub, offset, fetchFirst );
    return variants;
}
/**
 * Splice SQL Standard OFFSET/FETCH FIRST syntax into a stub statement,
 * given an offset and fetch count (either may be null to omit it).
 */
private String makeSQLStandardText
    ( String stub, String fetchFormat, String offset, String fetchFirst )
{
    StringBuilder clause = new StringBuilder();
    if ( offset != null )
    {
        clause.append( " offset " ).append( offset ).append( " rows " );
    }
    if ( fetchFirst != null )
    {
        // fetchFormat carries its own % marker for the row count.
        clause.append( substitute( fetchFormat, PERCENT_TOKEN, fetchFirst ) );
    }
    String sqlStandardText = substitute( stub, PERCENT_TOKEN, clause.toString() );
    println( sqlStandardText );
    return sqlStandardText;
}
/**
 * Splice JDBC limit/offset escape syntax ({ limit n offset m }) into a
 * stub statement, given an offset and fetch count (either may be null).
 */
private String makeJDBCText
    ( String stub, String offset, String fetchFirst )
{
    String jdbcText = "";
    if ( offset != null )
    {
        jdbcText = " offset " + offset;
    }
    if ( fetchFirst != null )
    {
        jdbcText = " limit " + fetchFirst + " " + jdbcText;
    }
    else
    {
        // In JDBC escape syntax "limit 0" means "no limit", so a stub
        // without a fetch count still gets a syntactically valid clause.
        jdbcText = "limit 0 " + jdbcText;
    }
    jdbcText = substitute( stub, PERCENT_TOKEN, " { " + jdbcText + " } " );
    println( jdbcText );
    return jdbcText;
}
/**
 * Replace the first occurrence of token in stub with replacement.
 * Fails the test if the token does not occur.
 */
private String substitute( String stub, String token, String replacement )
{
    int substitutionIndex = stub.indexOf( token );
    if ( substitutionIndex < 0 ) { fail( "Bad stub: " + stub + ". Can't find token: " + token ); }
    // Fix: skip the full token, not a single character (the original
    // hard-coded +1 only worked for 1-character tokens like "%").
    // substring(start) already yields "" at the end of the string, so
    // the old boundary ternary is unnecessary.
    return stub.substring( 0, substitutionIndex )
        + replacement
        + stub.substring( substitutionIndex + token.length() );
}
/**
 * Execute a query and assert that it returns exactly expectedRows.
 */
private void queryAndCheck(
    Statement stm,
    String queryText,
    String [][] expectedRows) throws SQLException {
    JDBC.assertFullResultSet(stm.executeQuery(queryText), expectedRows);
}
}
|
apache/hive | 37,897 | ql/src/java/org/apache/hadoop/hive/ql/parse/QBSubQuery.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.ql.parse;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import java.util.Stack;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.hadoop.hive.ql.Context;
import org.apache.hadoop.hive.ql.ErrorMsg;
import org.apache.hadoop.hive.ql.exec.ColumnInfo;
import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.SemanticNodeProcessor;
import org.apache.hadoop.hive.ql.optimizer.calcite.CalciteSubquerySemanticException;
import org.apache.hadoop.hive.ql.parse.SubQueryDiagnostic.QBSubQueryRewrite;
import org.apache.hadoop.hive.ql.parse.SubQueryUtils.ISubQueryJoinInfo;
import org.apache.hadoop.hive.ql.parse.type.ExprNodeTypeCheck;
import org.apache.hadoop.hive.ql.parse.type.TypeCheckCtx;
import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
public class QBSubQuery implements ISubQueryJoinInfo {
/**
 * The kind of subquery predicate: existence (EXISTS/NOT EXISTS),
 * membership (IN/NOT IN), quantified comparison (SOME/ALL), or a
 * scalar subquery (no operator node at all).
 */
public static enum SubQueryType {
  EXISTS,
  NOT_EXISTS,
  IN,
  NOT_IN,
  SOME,
  ALL,
  SCALAR;
  /**
   * Maps a subquery operator AST node to its SubQueryType.
   * A null opNode means there is no operator, i.e. a scalar subquery.
   *
   * @throws SemanticException for operators not supported in subqueries
   */
  public static SubQueryType get(ASTNode opNode) throws SemanticException {
    if(opNode == null) {
      return SCALAR;
    }
    switch(opNode.getType()) {
    // opNode's type is always either KW_EXISTS or KW_IN, never NOTEXISTS or
    // NOTIN; to detect negation we check the great-grandparent for KW_NOT.
    case HiveParser.KW_EXISTS:
      if(opNode.getParent().getParent().getParent() != null
          && opNode.getParent().getParent().getParent().getType() == HiveParser.KW_NOT) {
        return NOT_EXISTS;
      }
      return EXISTS;
    case HiveParser.TOK_SUBQUERY_OP_NOTEXISTS:
      return NOT_EXISTS;
    case HiveParser.KW_IN:
      if(opNode.getParent().getParent().getParent() != null
          && opNode.getParent().getParent().getParent().getType() == HiveParser.KW_NOT) {
        return NOT_IN;
      }
      return IN;
    case HiveParser.TOK_SUBQUERY_OP_NOTIN:
      return NOT_IN;
    case HiveParser.KW_SOME:
      return SOME;
    case HiveParser.KW_ALL:
      return ALL;
    default:
      throw new SemanticException(SemanticAnalyzer.generateErrorMessage(opNode,
          "Operator not supported in SubQuery use."));
    }
  }
}
/**
 * Pairs a {@link SubQueryType} with the AST node of the subquery
 * operator it was derived from.
 */
public static class SubQueryTypeDef {
  private final ASTNode ast;
  private final SubQueryType type;

  public SubQueryTypeDef(ASTNode ast, SubQueryType type) {
    this.ast = ast;
    this.type = type;
  }

  /** The operator's AST node. */
  public ASTNode getAst() {
    return ast;
  }

  /** The classified subquery type. */
  public SubQueryType getType() {
    return type;
  }
}
/*
* An expression is either the left/right side of an Equality predicate in the SubQuery where
* clause; or it is the entire conjunct. For e.g. if the Where Clause for a SubQuery is:
* where R1.X = R2.Y and R2.Z > 7
 * Then the expressions analyzed are R1.X, R2.Y (the left and right sides of the Equality
* predicate); and R2.Z > 7.
*
* The ExprType tracks whether the expr:
* - has a reference to a SubQuery table source
* - has a reference to Outer(parent) Query table source
*/
static enum ExprType {
  // Refers to neither the parent query nor the subquery.
  REFERS_NONE(false, false) {
    @Override
    public ExprType combine(ExprType other) {
      return other;
    }
  },
  // Refers only to table sources of the outer (parent) query.
  REFERS_PARENT(true, false) {
    @Override
    public ExprType combine(ExprType other) {
      switch(other) {
      case REFERS_SUBQUERY:
      case REFERS_BOTH:
        return REFERS_BOTH;
      default:
        return this;
      }
    }
  },
  // Refers only to table sources of the subquery.
  REFERS_SUBQUERY(false, true) {
    @Override
    public ExprType combine(ExprType other) {
      switch(other) {
      case REFERS_PARENT:
      case REFERS_BOTH:
        return REFERS_BOTH;
      default:
        return this;
      }
    }
  },
  // Refers to both sides; such a conjunct is a correlation predicate.
  REFERS_BOTH(true,true) {
    @Override
    public ExprType combine(ExprType other) {
      return this;
    }
  };
  final boolean refersParent;
  final boolean refersSubQuery;
  ExprType(boolean refersParent, boolean refersSubQuery) {
    this.refersParent = refersParent;
    this.refersSubQuery = refersSubQuery;
  }
  public boolean refersParent() {
    return refersParent;
  }
  public boolean refersSubQuery() {
    return refersSubQuery;
  }
  // Lattice join: combining with the opposite side (or with REFERS_BOTH)
  // yields REFERS_BOTH; REFERS_NONE is the identity element.
  public abstract ExprType combine(ExprType other);
}
/*
* This class captures the information about a
* conjunct in the where clause of the SubQuery.
* For a equality predicate it capture for each side:
* - the AST
* - the type of Expression (basically what columns are referenced)
* - for Expressions that refer the parent it captures the
* parent's ColumnInfo. In case of outer Aggregation expressions
* we need this to introduce a new mapping in the OuterQuery
* RowResolver. A join condition must use qualified column references,
* so we generate a new name for the aggr expression and use it in the
* joining condition.
* For e.g.
* having exists ( select x from R2 where y = min(R1.z) )
* where the expression 'min(R1.z)' is from the outer Query.
* We give this expression a new name like 'R1._gby_sq_col_1'
* and use the join condition: R1._gby_sq_col_1 = R2.y
*/
/*
 * Captures one conjunct of the subquery's WHERE clause: the ASTs of its
 * (up to two) sides, how each side was classified by the
 * ConjunctAnalyzer, and — for sides referencing the outer query — the
 * outer column's ColumnInfo.
 */
static class Conjunct {
  private final ASTNode leftExpr;
  private final ASTNode rightExpr;
  private final ExprType leftExprType;
  private final ExprType rightExprType;
  private final ColumnInfo leftOuterColInfo;
  private final ColumnInfo rightOuterColInfo;

  Conjunct(ASTNode leftExpr,
      ASTNode rightExpr,
      ExprType leftExprType,
      ExprType rightExprType,
      ColumnInfo leftOuterColInfo,
      ColumnInfo rightOuterColInfo) {
    this.leftExpr = leftExpr;
    this.rightExpr = rightExpr;
    this.leftExprType = leftExprType;
    this.rightExprType = rightExprType;
    this.leftOuterColInfo = leftOuterColInfo;
    this.rightOuterColInfo = rightOuterColInfo;
  }

  ASTNode getLeftExpr() {
    return leftExpr;
  }

  ASTNode getRightExpr() {
    return rightExpr;
  }

  ExprType getLeftExprType() {
    return leftExprType;
  }

  ExprType getRightExprType() {
    return rightExprType;
  }

  /** True when at least one side references both the outer query and the subquery. */
  boolean eitherSideRefersBoth() {
    if (leftExprType == ExprType.REFERS_BOTH) {
      return true;
    }
    return rightExpr != null && rightExprType == ExprType.REFERS_BOTH;
  }

  /** A binary conjunct is correlated when its sides together touch both queries. */
  boolean isCorrelated() {
    return rightExpr != null
        && leftExprType.combine(rightExprType) == ExprType.REFERS_BOTH;
  }

  /** True when the conjunct references only the outer (parent) query. */
  boolean refersOuterOnly() {
    ExprType combined =
        (rightExpr == null) ? leftExprType : leftExprType.combine(rightExprType);
    return combined == ExprType.REFERS_PARENT;
  }

  ColumnInfo getLeftOuterColInfo() {
    return leftOuterColInfo;
  }

  ColumnInfo getRightOuterColInfo() {
    return rightOuterColInfo;
  }
}
/**
 * Analyzes one conjunct of the subquery's WHERE clause, classifying each
 * side of an (equality) predicate by whether it references the parent
 * query, the subquery, both, or neither.
 */
class ConjunctAnalyzer {
  // Row resolver of the outer (parent) query, used to resolve
  // qualified column references.
  RowResolver parentQueryRR;
  // True when the subquery appears in a HAVING clause; outer aggregate
  // expressions may then resolve directly against the parent RR.
  boolean forHavingClause;
  String parentQueryNewAlias;
  SemanticNodeProcessor defaultExprProcessor;
  // Scratch stack handed to the expression processor.
  Stack<Node> stack;
  ConjunctAnalyzer(RowResolver parentQueryRR,
      boolean forHavingClause,
      String parentQueryNewAlias) {
    this.parentQueryRR = parentQueryRR;
    defaultExprProcessor = ExprNodeTypeCheck.getExprNodeDefaultExprProcessor();
    this.forHavingClause = forHavingClause;
    this.parentQueryNewAlias = parentQueryNewAlias;
    stack = new Stack<Node>();
  }
  /*
   * 1. On encountering a DOT, we attempt to resolve the leftmost name
   * to the Parent Query.
   * 2. An unqualified name is assumed to be a SubQuery reference.
   * We don't attempt to resolve this to the Parent; because
   * we require all Parent column references to be qualified.
   * 3. All other expressions have a Type based on their children.
   * An Expr w/o children is assumed to refer to neither.
   */
  private Pair<ExprType, ColumnInfo> analyzeExpr(ASTNode expr) {
    ColumnInfo cInfo = null;
    if ( forHavingClause ) {
      try {
        cInfo = parentQueryRR.getExpression(expr);
        if ( cInfo != null) {
          return Pair.of(ExprType.REFERS_PARENT, cInfo);
        }
      } catch(SemanticException se) {
        // Best-effort resolution: failure just means the expression is
        // not an outer-query expression; fall through and keep analyzing.
      }
    }
    if ( expr.getType() == HiveParser.DOT) {
      ASTNode dot = firstDot(expr);
      cInfo = resolveDot(dot);
      if ( cInfo != null ) {
        return Pair.of(ExprType.REFERS_PARENT, cInfo);
      }
      // Qualified name that does not resolve on the parent: assume it
      // belongs to the subquery.
      return Pair.of(ExprType.REFERS_SUBQUERY, null);
    } else if ( expr.getType() == HiveParser.TOK_TABLE_OR_COL ) {
      // Unqualified names are assumed to be subquery references (rule 2).
      return Pair.of(ExprType.REFERS_SUBQUERY, null);
    } else {
      // Combine the classification of all children (rule 3).
      ExprType exprType = ExprType.REFERS_NONE;
      int cnt = expr.getChildCount();
      for(int i=0; i < cnt; i++) {
        ASTNode child = (ASTNode) expr.getChild(i);
        exprType = exprType.combine(analyzeExpr(child).getLeft());
      }
      return Pair.of(exprType, null);
    }
  }
  /*
   * we analyze each side and let the
   * left and right exprs in the Conjunct object.
   *
   * @return Conjunct contains details on the left and right side of the conjunct expression.
   */
  Conjunct analyzeConjunct(ASTNode conjunct) throws SemanticException {
    if(conjunct.getChildCount() == 2) {
      // Binary predicate: analyze both operands independently.
      ASTNode left = (ASTNode) conjunct.getChild(0);
      ASTNode right = (ASTNode) conjunct.getChild(1);
      Pair<ExprType, ColumnInfo> leftInfo = analyzeExpr(left);
      Pair<ExprType, ColumnInfo> rightInfo = analyzeExpr(right);
      return new Conjunct(left, right,
          leftInfo.getLeft(), rightInfo.getLeft(),
          leftInfo.getRight(), rightInfo.getRight());
    } else {
      // Whole conjunct treated as one expression. Note the same
      // ColumnInfo is passed for both outer-column slots here.
      Pair<ExprType, ColumnInfo> sqExprInfo = analyzeExpr(conjunct);
      return new Conjunct(conjunct, null,
          sqExprInfo.getLeft(), null,
          sqExprInfo.getRight(), sqExprInfo.getRight());
    }
  }
  /*
   * Try to resolve a qualified name as a column reference on the Parent Query's RowResolver.
   * Apply this logic on the leftmost(first) dot in an AST tree.
   */
  protected ColumnInfo resolveDot(ASTNode node) {
    try {
      TypeCheckCtx tcCtx = new TypeCheckCtx(parentQueryRR);
      String str = BaseSemanticAnalyzer.unescapeIdentifier(node.getChild(1).getText());
      ExprNodeDesc idDesc = new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, str.toLowerCase());
      Object desc = defaultExprProcessor.process(node, stack, tcCtx, (Object) null, idDesc);
      if (desc != null && desc instanceof ExprNodeColumnDesc) {
        ExprNodeColumnDesc colDesc = (ExprNodeColumnDesc) desc;
        String[] qualName = parentQueryRR.reverseLookup(colDesc.getColumn());
        return parentQueryRR.get(qualName[0], qualName[1]);
      }
    } catch(SemanticException se) {
      // Resolution failure means the name does not belong to the parent
      // query; callers interpret the null return accordingly.
    }
    return null;
  }
  /*
   * We want to resolve the leftmost name to the Parent Query's RR.
   * Hence we do a left walk down the AST, until we reach the bottom most DOT.
   */
  protected ASTNode firstDot(ASTNode dot) {
    ASTNode firstChild = (ASTNode) dot.getChild(0);
    if ( firstChild != null && firstChild.getType() == HiveParser.DOT) {
      return firstDot(firstChild);
    }
    return dot;
  }
}
/*
* When transforming a Not In SubQuery we need to check for nulls in the
* Joining expressions of the SubQuery. If there are nulls then the SubQuery always
* return false. For more details see
* https://issues.apache.org/jira/secure/attachment/12614003/SubQuerySpec.pdf
*
* Basically, SQL semantics say that:
* - R1.A not in (null, 1, 2, ...)
* is always false.
* A 'not in' operator is equivalent to a '<> all'. Since a not equal check with null
 * returns false, a not in predicate against a set with a 'null' value always returns false.
*
* So for not in SubQuery predicates:
* - we join in a null count predicate.
* - And the joining condition is that the 'Null Count' query has a count of 0.
*
*/
/**
 * Auxiliary join info for NOT IN subqueries: joins in a "null count"
 * query and requires its count to be 0, implementing the SQL rule that
 * NOT IN against a set containing NULL is always false.
 */
class NotInCheck implements ISubQueryJoinInfo {
  private static final String CNT_ALIAS = "c1";
  /*
   * expressions in SubQ that are joined to the Outer Query.
   */
  List<ASTNode> subQryCorrExprs;
  /*
   * row resolver of the SubQuery.
   * Set by the SemanticAnalyzer after the Plan for the SubQuery is genned.
   * This is needed in case the SubQuery select list contains a TOK_ALLCOLREF
   */
  RowResolver sqRR;
  NotInCheck() {
    subQryCorrExprs = new ArrayList<ASTNode>();
  }
  // Record a correlated expression that must be null-checked.
  void addCorrExpr(ASTNode corrExpr) {
    subQryCorrExprs.add(corrExpr);
  }
  // Build the "null count" query AST over the original subquery.
  public ASTNode getSubQueryAST() {
    ASTNode ast = SubQueryUtils.buildNotInNullCheckQuery(
        QBSubQuery.this.getSubQueryAST(),
        QBSubQuery.this.getAlias(),
        CNT_ALIAS,
        subQryCorrExprs,
        sqRR);
    return ast;
  }
  public String getAlias() {
    return QBSubQuery.this.getAlias() + "_notin_nullcheck";
  }
  // The null-count check is joined in as a left semijoin.
  public JoinType getJoinType() {
    return JoinType.LEFTSEMI;
  }
  // Join condition: the null count must be 0.
  public ASTNode getJoinConditionAST() {
    ASTNode ast =
        SubQueryUtils.buildNotInNullJoinCond(getAlias(), CNT_ALIAS);
    return ast;
  }
  public QBSubQuery getSubQuery() {
    return QBSubQuery.this;
  }
  public String getOuterQueryId() {
    return QBSubQuery.this.getOuterQueryId();
  }
  void setSQRR(RowResolver sqRR) {
    this.sqRR = sqRR;
  }
}
// Id of the query block that contains this subquery.
private final String outerQueryId;
// Position of this subquery within the outer query.
private final int sqIdx;
// Generated alias "sq_<sqIdx>" for the subquery source.
private final String alias;
private final ASTNode subQueryAST;
// Outer-query expression compared with the subquery (IN / quantified).
private final ASTNode parentQueryExpression;
private final SubQueryTypeDef operator;
private boolean containsAggregationExprs;
private boolean hasCorrelation;
// Join condition and type used when the subquery is rewritten to a join.
private ASTNode joinConditionAST;
private JoinType joinType;
// Condition to be applied after the join (e.g. null checks).
private ASTNode postJoinConditionAST;
// Number of correlated expressions found in the subquery.
private int numCorrExprsinSQ;
private List<ASTNode> subQueryJoinAliasExprs;
// Origin info pointing back at the user's original subquery text.
private transient final ASTNodeOrigin originalSQASTOrigin;
/*
 * tracks number of exprs from correlated predicates added to SQ select list.
 */
private int numOfCorrelationExprsAddedToSQSelect;
private boolean groupByAddedToSQ;
private int numOuterCorrExprsForHaving;
// Extra null-count check needed only for NOT IN subqueries.
private NotInCheck notInCheck;
// Collects a rewritten form of the subquery for diagnostics.
private QBSubQueryRewrite subQueryDiagnostic;
/**
 * @param outerQueryId id of the enclosing query block
 * @param sqIdx position of this subquery in the outer query; also
 *        determines the generated alias {@code sq_<sqIdx>}
 * @param subQueryAST AST of the subquery itself
 * @param parentQueryExpression outer-query expression the subquery is
 *        compared with (presumably null for EXISTS-style subqueries —
 *        confirm with callers)
 * @param operator parsed subquery operator (IN, NOT IN, EXISTS, ...)
 * @param originalSQAST original AST span, used to capture the user's
 *        SQL text for diagnostics
 * @param ctx compile-time context supplying the token rewrite stream
 */
public QBSubQuery(String outerQueryId,
    int sqIdx,
    ASTNode subQueryAST,
    ASTNode parentQueryExpression,
    SubQueryTypeDef operator,
    ASTNode originalSQAST,
    Context ctx) {
  super();
  this.subQueryAST = subQueryAST;
  this.parentQueryExpression = parentQueryExpression;
  this.operator = operator;
  this.outerQueryId = outerQueryId;
  this.sqIdx = sqIdx;
  this.alias = "sq_" + this.sqIdx;
  this.numCorrExprsinSQ = 0;
  this.numOuterCorrExprsForHaving = 0;
  // Capture the original SQL text of the subquery for error messages.
  String s = ctx.getTokenRewriteStream().toString(
      originalSQAST.getTokenStartIndex(), originalSQAST.getTokenStopIndex());
  originalSQASTOrigin = new ASTNodeOrigin("SubQuery", alias, s, alias, originalSQAST);
  numOfCorrelationExprsAddedToSQSelect = 0;
  groupByAddedToSQ = false;
  // NOT IN needs an extra null-count check (see NotInCheck).
  if ( operator.getType() == SubQueryType.NOT_IN ) {
    notInCheck = new NotInCheck();
  }
  subQueryDiagnostic = SubQueryDiagnostic.getRewrite(this, ctx.getTokenRewriteStream(), ctx);
}
/** The subquery's own AST. */
public ASTNode getSubQueryAST() {
  return subQueryAST;
}

/** The parsed subquery operator. */
public SubQueryTypeDef getOperator() {
  return operator;
}

/**
 * AST node to rewrite: for NOT EXISTS / NOT IN the parent of the usage
 * node (the enclosing NOT) is returned, otherwise the usage node itself.
 */
public ASTNode getOriginalSubQueryASTForRewrite() {
  ASTNode usage = originalSQASTOrigin.getUsageNode();
  if (operator.getType() == SubQueryType.NOT_EXISTS
      || operator.getType() == SubQueryType.NOT_IN) {
    return (ASTNode) usage.getParent();
  }
  return usage;
}
/**
 * Enforces semantic restrictions on the subquery before rewriting:
 * <ul>
 * <li>Check.5.h: non-EXISTS subqueries must select exactly one item.</li>
 * <li>Restriction.14.h: only (correlated) EXISTS / NOT EXISTS
 *     subqueries may contain windowing clauses.</li>
 * </ul>
 * (Note: the previous doc claimed a boolean return; the method is void
 * and signals violations via exceptions.)
 *
 * @param parentQueryRR row resolver of the outer query
 * @param forHavingClause true if the subquery appears in a HAVING clause
 * @param outerQueryAlias alias of the outer query block
 * @throws SemanticException if a restriction is violated
 */
void subqueryRestrictionsCheck(RowResolver parentQueryRR,
    boolean forHavingClause,
    String outerQueryAlias)
    throws SemanticException {
  ASTNode insertClause = getChildFromSubqueryAST("Insert", HiveParser.TOK_INSERT);
  ASTNode selectClause = (ASTNode) insertClause.getChild(1);
  int selectExprStart = 0;
  // Skip a leading query hint, if present.
  if ( selectClause.getChild(0).getType() == HiveParser.QUERY_HINT ) {
    selectExprStart = 1;
  }
  /*
   * Check.5.h :: For In and Not In the SubQuery must implicitly or
   * explicitly only contain one select item.
   */
  if ( operator.getType() != SubQueryType.EXISTS &&
      operator.getType() != SubQueryType.NOT_EXISTS &&
      selectClause.getChildCount() - selectExprStart > 1 ) {
    subQueryAST.setOrigin(originalSQASTOrigin);
    throw new SemanticException(ASTErrorUtils.getMsg(
        ErrorMsg.INVALID_SUBQUERY_EXPRESSION.getMsg(),
        subQueryAST, "SubQuery can contain only 1 item in Select List."));
  }
  boolean hasWindowing = false;
  // we need to know if aggregate is COUNT since IN corr subq with count aggregate
  // is not special cased later in subquery remove rule
  for(int i= selectExprStart; i < selectClause.getChildCount(); i++ ) {
    ASTNode selectItem = (ASTNode) selectClause.getChild(i);
    // checkAggOrWindowing returns 3 when the item contains windowing.
    int r = SubQueryUtils.checkAggOrWindowing(selectItem);
    hasWindowing = hasWindowing | ( r == 3);
  }
  // figure out correlation and presence of non-equi join predicate
  // NOTE(review): this local shadows the field of the same name; the
  // field is not updated here — confirm that is intended.
  boolean hasCorrelation = false;
  ASTNode whereClause = SubQueryUtils.subQueryWhere(insertClause);
  if ( whereClause != null ) {
    ASTNode searchCond = (ASTNode) whereClause.getChild(0);
    List<ASTNode> conjuncts = new ArrayList<ASTNode>();
    SubQueryUtils.extractConjuncts(searchCond, conjuncts);
    ConjunctAnalyzer conjunctAnalyzer =
        new ConjunctAnalyzer(parentQueryRR, forHavingClause, outerQueryAlias);
    for (ASTNode conjunctAST : conjuncts) {
      Conjunct conjunct = conjunctAnalyzer.analyzeConjunct(conjunctAST);
      if (conjunct.isCorrelated()) {
        hasCorrelation = true;
      }
    }
  }
  /*
   * Restriction.14.h :: Only Correlated Exists/Not exists Sub Queries can contain Windowing clauses.
   */
  if (operator.getType() != SubQueryType.EXISTS && operator.getType() != SubQueryType.NOT_EXISTS &&
      hasWindowing && hasCorrelation) {
    throw new CalciteSubquerySemanticException(ASTErrorUtils.getMsg(
        ErrorMsg.UNSUPPORTED_SUBQUERY_EXPRESSION.getMsg(),
        subQueryAST, "Only Correlated Exists/Not exists Sub Queries can contain Windowing clauses."));
  }
}
  /**
   * Validates this SubQuery against the legacy (non-Calcite) restrictions and
   * then rewrites its AST (via {@link #rewrite}) so correlated predicates are
   * hoisted into a join condition. Enforces Checks 4.h and 5.h and
   * Restrictions 13.m and 14.h documented inline below.
   *
   * @param outerQueryRR row resolver of the outer query
   * @param forHavingClause whether the SubQuery appears in a Having clause
   * @param outerQueryAlias alias used to qualify outer-query column references
   * @param outerQryAliases table aliases already used by the outer query
   * @throws SemanticException if validation fails
   */
  void validateAndRewriteAST(RowResolver outerQueryRR,
      boolean forHavingClause,
      String outerQueryAlias,
      Set<String> outerQryAliases) throws SemanticException {
    ASTNode fromClause = getChildFromSubqueryAST("From", HiveParser.TOK_FROM);
    ASTNode insertClause = getChildFromSubqueryAST("Insert", HiveParser.TOK_INSERT);
    ASTNode selectClause = (ASTNode) insertClause.getChild(1);
    int selectExprStart = 0;
    if ( selectClause.getChild(0).getType() == HiveParser.QUERY_HINT ) {
      selectExprStart = 1;
    }
    /*
     * Restriction.16.s :: Correlated Expression in Outer Query must not contain
     * unqualified column references.
     * disabled : if it's obvious, we allow unqualified refs
     */
    /*
     * Restriction 17.s :: SubQuery cannot use the same table alias as one used in
     * the Outer Query.
     */
    List<String> sqAliases = SubQueryUtils.getTableAliasesInSubQuery(fromClause);
    String sharedAlias = null;
    for(String s : sqAliases ) {
      if ( outerQryAliases.contains(s) ) {
        sharedAlias = s;
      }
    }
    // NOTE(review): the whereClause computed below is never used — the
    // Restriction 17.s enforcement appears to have been disabled, leaving this
    // dead code behind. Confirm intent before removing.
    if ( sharedAlias != null) {
      ASTNode whereClause = SubQueryUtils.subQueryWhere(insertClause);
    }
    /*
     * Check.5.h :: For In and Not In the SubQuery must implicitly or
     * explicitly only contain one select item.
     */
    if ( operator.getType() != SubQueryType.EXISTS &&
        operator.getType() != SubQueryType.NOT_EXISTS &&
        selectClause.getChildCount() - selectExprStart > 1 ) {
      subQueryAST.setOrigin(originalSQASTOrigin);
      throw new SemanticException(ASTErrorUtils.getMsg(
          ErrorMsg.INVALID_SUBQUERY_EXPRESSION.getMsg(),
          subQueryAST, "SubQuery can contain only 1 item in Select List."))
;
    }
    containsAggregationExprs = false;
    boolean containsWindowing = false;
    // Classify select items: r == 1 -> aggregation, r == 3 -> windowing.
    for(int i= selectExprStart; i < selectClause.getChildCount(); i++ ) {
      ASTNode selectItem = (ASTNode) selectClause.getChild(i);
      int r = SubQueryUtils.checkAggOrWindowing(selectItem);
      containsWindowing = containsWindowing | ( r == 3);
      containsAggregationExprs = containsAggregationExprs | ( r == 1 );
    }
    // Hoist correlated predicates into the join condition; may set
    // hasCorrelation and groupByAddedToSQ as side effects.
    rewrite(outerQueryRR, forHavingClause, outerQueryAlias, insertClause, selectClause);
    /*
     * Restriction.13.m :: In the case of an implied Group By on a
     * correlated SubQuery, the SubQuery always returns 1 row.
     * An exists on a SubQuery with an implied GBy will always return true.
     * Whereas Algebraically transforming to a Join may not return true. See
     * Specification doc for details.
     * Similarly a not exists on a SubQuery with a implied GBY will always return false.
     */
    if ( operator.getType() == SubQueryType.EXISTS &&
        containsAggregationExprs &&
        groupByAddedToSQ) {
      throw new SemanticException(ASTErrorUtils.getMsg(
          ErrorMsg.INVALID_SUBQUERY_EXPRESSION.getMsg(),
          subQueryAST,
          "An Exists predicate on SubQuery with implicit Aggregation(no Group By clause) " +
          "cannot be rewritten. (predicate will always return true)."));
    }
    if ( operator.getType() == SubQueryType.NOT_EXISTS &&
        containsAggregationExprs &&
        groupByAddedToSQ) {
      throw new SemanticException(ASTErrorUtils.getMsg(
          ErrorMsg.INVALID_SUBQUERY_EXPRESSION.getMsg(),
          subQueryAST,
          "A Not Exists predicate on SubQuery with implicit Aggregation(no Group By clause) " +
          "cannot be rewritten. (predicate will always return false)."));
    }
    /*
     * Restriction.14.h :: Correlated Sub Queries cannot contain Windowing clauses.
     */
    if ( containsWindowing && hasCorrelation ) {
      throw new SemanticException(ASTErrorUtils.getMsg(
          ErrorMsg.UNSUPPORTED_SUBQUERY_EXPRESSION.getMsg(),
          subQueryAST, "Correlated Sub Queries cannot contain Windowing clauses."));
    }
    /*
     * Check.4.h :: For Exists and Not Exists, the Sub Query must
     * have 1 or more correlated predicates.
     */
    if ( ( operator.getType() == SubQueryType.EXISTS ||
        operator.getType() == SubQueryType.NOT_EXISTS ) &&
        !hasCorrelation ) {
      throw new SemanticException(ASTErrorUtils.getMsg(
          ErrorMsg.INVALID_SUBQUERY_EXPRESSION.getMsg(),
          subQueryAST, "For Exists/Not Exists operator SubQuery must be Correlated."));
    }
  }
private ASTNode getChildFromSubqueryAST(String errorMsg, int type) throws SemanticException {
ASTNode childAST = (ASTNode) subQueryAST.getFirstChildWithType(type);
if (childAST == null && errorMsg != null) {
subQueryAST.setOrigin(originalSQASTOrigin);
throw new SemanticException(ASTErrorUtils.getMsg(
ErrorMsg.INVALID_SUBQUERY_EXPRESSION.getMsg(),
subQueryAST, errorMsg + " clause is missing in SubQuery."));
}
return childAST;
}
private void setJoinType() {
if ( operator.getType() == SubQueryType.NOT_IN ||
operator.getType() == SubQueryType.NOT_EXISTS ) {
joinType = JoinType.LEFTOUTER;
} else {
joinType = JoinType.LEFTSEMI;
}
}
  /**
   * Builds the final join condition between the outer query and this SubQuery:
   * combines the parent-query expression (the lhs of IN/comparison operators)
   * with the correlated-predicate join condition accumulated during
   * {@link #rewrite}, then sets the join type and, for outer joins, the
   * post-join null-check condition.
   *
   * @param outerQueryRR row resolver of the outer query
   * @param sqRR row resolver of this SubQuery
   * @param forHavingClause whether the SubQuery appears in a Having clause
   * @param outerQueryAlias alias used to qualify outer-query column references
   * @throws SemanticException if the correlating expression is ambiguous
   */
  void buildJoinCondition(RowResolver outerQueryRR, RowResolver sqRR,
      boolean forHavingClause,
      String outerQueryAlias) throws SemanticException {
    ASTNode parentQueryJoinCond = null;
    if ( parentQueryExpression != null ) {
      ColumnInfo outerQueryCol = null;
      try {
        outerQueryCol = outerQueryRR.getExpression(parentQueryExpression);
      } catch(SemanticException se) {
        // ignore
      }
      ASTNode parentExpr = parentQueryExpression;
      if (!forHavingClause) {
        // NOTE(review): keySet() is a live view of the RowResolver's map, so the
        // remove() below mutates outerQueryRR itself — confirm this side effect
        // is intended rather than operating on a copy.
        Set<String> aliases = outerQueryRR.getRslvMap().keySet();
        if (notInCheck != null) {
          aliases.remove(notInCheck.getAlias());
        }
        // Only qualify unqualified refs when the table alias is unambiguous.
        String tableAlias = aliases.size() == 1 ? aliases.iterator().next() : null;
        parentExpr =
            SubQueryUtils.setQualifiedColumnReferences(parentExpr, tableAlias);
        if (parentExpr == null) {
          subQueryAST.setOrigin(originalSQASTOrigin);
          throw new SemanticException(ASTErrorUtils.getMsg(
              ErrorMsg.UNSUPPORTED_SUBQUERY_EXPRESSION.getMsg(),
              parentQueryExpression,
              "Correlating expression contains ambiguous column references."));
        }
      }
      parentQueryJoinCond = SubQueryUtils.buildOuterQryToSQJoinCond(
          parentExpr,
          alias,
          sqRR);
      // If the parent expression resolves to an outer aggregation (Having case),
      // rewrite it to a qualified generated column reference.
      if ( outerQueryCol != null ) {
        rewriteCorrConjunctForHaving(parentQueryJoinCond, true,
            outerQueryAlias, outerQueryRR, outerQueryCol);
      }
      subQueryDiagnostic.addJoinCondition(parentQueryJoinCond, outerQueryCol != null, true);
    }
    joinConditionAST = SubQueryUtils.andAST(parentQueryJoinCond, joinConditionAST);
    setJoinType();
    if ( joinType == JoinType.LEFTOUTER ) {
      if ( operator.getType() == SubQueryType.NOT_EXISTS && hasCorrelation ) {
        postJoinConditionAST = SubQueryUtils.buildPostJoinNullCheck(subQueryJoinAliasExprs);
      } else if ( operator.getType() == SubQueryType.NOT_IN ) {
        postJoinConditionAST = SubQueryUtils.buildOuterJoinPostCond(alias, sqRR);
      }
    }
  }
ASTNode updateOuterQueryFilter(ASTNode outerQryFilter) {
if (postJoinConditionAST == null ) {
return outerQryFilter;
}
subQueryDiagnostic.addPostJoinCondition(postJoinConditionAST);
if ( outerQryFilter == null ) {
return postJoinConditionAST;
}
ASTNode node = SubQueryUtils.andAST(outerQryFilter, postJoinConditionAST);
return node;
}
  /** Generates a fresh alias ("sq_corr_&lt;n&gt;") for a hoisted correlated expression. */
  String getNextCorrExprAlias() {
    return "sq_corr_" + numCorrExprsinSQ++;
  }
/*
* - If the SubQuery has no where clause, there is nothing to rewrite.
* - Decompose SubQuery where clause into list of Top level conjuncts.
* - For each conjunct
* - Break down the conjunct into (LeftExpr, LeftExprType, RightExpr,
* RightExprType)
* - If the top level operator is an Equality Operator we will break
* it down into left and right; in all other case there is only a
* lhs.
* - The ExprType is based on whether the Expr. refers to the Parent
* Query table sources, refers to the SubQuery sources or both.
* - We assume an unqualified Column refers to a SubQuery table source.
* This is because we require Parent Column references to be qualified
* within the SubQuery.
* - If the lhs or rhs expr refers to both Parent and SubQuery sources,
* we flag this as Unsupported.
* - If the conjunct as a whole, only refers to the Parent Query sources,
* we flag this as an Error.
* - A conjunct is Correlated if the lhs refers to SubQuery sources and rhs
* refers to Parent Query sources or the reverse.
* - Say the lhs refers to SubQuery and rhs refers to Parent Query sources; the
* other case is handled analogously.
* - remove this conjunct from the SubQuery where clause.
* - for the SubQuery expression(lhs) construct a new alias
* - in the correlated predicate, replace the SubQuery
* expression(lhs) with the alias AST.
* - add this altered predicate to the Join predicate tracked by the
* QBSubQuery object.
* - add the alias AST to a list of subQueryJoinAliasExprs. This
* list is used in the case of Outer Joins to add null check
* predicates to the Outer Query's where clause.
* - Add the SubQuery expression with the alias as a SelectItem to
* the SubQuery's SelectList.
* - In case this SubQuery contains aggregation expressions add this SubQuery
* expression to its GroupBy; add it to the front of the GroupBy.
* - If predicate is not correlated, let it remain in the SubQuery
* where clause.
* Additional things for Having clause:
* - A correlation predicate may refer to an aggregation expression.
* - This introduces 2 twists to the rewrite:
* a. When analyzing equality predicates we need to analyze each side
* to see if it is an aggregation expression from the Outer Query.
* So for e.g. this is a valid correlation predicate:
* R2.x = min(R1.y)
* Where R1 is an outer table reference, and R2 is a SubQuery table reference.
* b. When hoisting the correlation predicate to a join predicate, we need to
   * rewrite it to be in the form the Join code allows: so the predicate needs
* to contain a qualified column references.
* We handle this by generating a new name for the aggregation expression,
* like R1._gby_sq_col_1 and adding this mapping to the Outer Query's
* Row Resolver. Then we construct a joining predicate using this new
* name; so in our e.g. the condition would be: R2.x = R1._gby_sq_col_1
*/
  /**
   * Rewrites the SubQuery's where clause: hoists each correlated conjunct into
   * the join condition (adding the SubQuery-side expression to the select list
   * under a generated alias, and to the group-by for aggregating SubQueries),
   * and leaves uncorrelated conjuncts in place. See the algorithm description
   * in the comment block above this method.
   *
   * @param parentQueryRR row resolver of the outer query
   * @param forHavingClause whether the SubQuery appears in a Having clause
   * @param outerQueryAlias alias used to qualify outer-query column references
   * @param insertClause TOK_INSERT node of the SubQuery
   * @param selectClause select-clause node of the SubQuery
   * @throws SemanticException if a conjunct refers only to outer-query columns
   */
  private void rewrite(RowResolver parentQueryRR,
      boolean forHavingClause,
      String outerQueryAlias, ASTNode insertClause, ASTNode selectClause) throws SemanticException {
    ASTNode whereClause = SubQueryUtils.subQueryWhere(insertClause);
    if ( whereClause == null ) {
      return;
    }
    ASTNode searchCond = (ASTNode) whereClause.getChild(0);
    List<ASTNode> conjuncts = new ArrayList<ASTNode>();
    SubQueryUtils.extractConjuncts(searchCond, conjuncts);
    ConjunctAnalyzer conjunctAnalyzer = new ConjunctAnalyzer(parentQueryRR,
        forHavingClause, outerQueryAlias);
    // Rebuilt where-clause condition containing only the uncorrelated conjuncts.
    ASTNode sqNewSearchCond = null;
    for(ASTNode conjunctAST : conjuncts) {
      Conjunct conjunct = conjunctAnalyzer.analyzeConjunct(conjunctAST);
      /*
       * Check.12.h :: SubQuery predicates cannot only refer to Outer Query columns.
       */
      if ( conjunct.refersOuterOnly() ) {
        throw new SemanticException(ASTErrorUtils.getMsg(
            ErrorMsg.UNSUPPORTED_SUBQUERY_EXPRESSION.getMsg(),
            conjunctAST,
            "SubQuery expression refers to Outer query expressions only."));
      }
      if ( conjunct.isCorrelated() ) {
        hasCorrelation = true;
        // NOTE(review): the list is re-created on EVERY correlated conjunct,
        // discarding alias expressions gathered from earlier iterations; the
        // NOT EXISTS post-join null check consumes this list, so verify the
        // reset is intended rather than a one-time lazy init.
        subQueryJoinAliasExprs = new ArrayList<ASTNode>();
        String exprAlias = getNextCorrExprAlias();
        ASTNode sqExprAlias = SubQueryUtils.createAliasAST(exprAlias);
        ASTNode sqExprForCorr = SubQueryUtils.createColRefAST(alias, exprAlias);
        boolean corrCondLeftIsRewritten = false;
        boolean corrCondRightIsRewritten = false;
        // Two symmetric branches: lhs refers to the SubQuery, or rhs does.
        if ( conjunct.getLeftExprType().refersSubQuery() ) {
          corrCondLeftIsRewritten = true;
          if ( forHavingClause && conjunct.getRightOuterColInfo() != null ) {
            corrCondRightIsRewritten = true;
            rewriteCorrConjunctForHaving(conjunctAST, false, outerQueryAlias,
                parentQueryRR, conjunct.getRightOuterColInfo());
          }
          ASTNode joinPredicate = SubQueryUtils.alterCorrelatedPredicate(
              conjunctAST, sqExprForCorr, true);
          joinConditionAST = SubQueryUtils.andAST(joinConditionAST, joinPredicate);
          subQueryJoinAliasExprs.add(sqExprForCorr);
          // Surface the SubQuery-side expression in the select list under the alias.
          ASTNode selExpr = SubQueryUtils.createSelectItem(conjunct.getLeftExpr(), sqExprAlias);
          selectClause.addChild(selExpr);
          subQueryDiagnostic.addSelectClauseRewrite(conjunct.getLeftExpr(), exprAlias);
          numOfCorrelationExprsAddedToSQSelect++;
          if ( containsAggregationExprs ) {
            ASTNode gBy = getSubQueryGroupByAST();
            SubQueryUtils.addGroupExpressionToFront(gBy, conjunct.getLeftExpr());
            subQueryDiagnostic.addGByClauseRewrite(conjunct.getLeftExpr());
          }
          if ( notInCheck != null ) {
            notInCheck.addCorrExpr((ASTNode)conjunctAST.getChild(0));
          }
          subQueryDiagnostic.addJoinCondition(conjunctAST, corrCondLeftIsRewritten, corrCondRightIsRewritten);
        } else {
          corrCondRightIsRewritten = true;
          if ( forHavingClause && conjunct.getLeftOuterColInfo() != null ) {
            corrCondLeftIsRewritten = true;
            rewriteCorrConjunctForHaving(conjunctAST, true, outerQueryAlias,
                parentQueryRR, conjunct.getLeftOuterColInfo());
          }
          ASTNode joinPredicate = SubQueryUtils.alterCorrelatedPredicate(
              conjunctAST, sqExprForCorr, false);
          joinConditionAST = SubQueryUtils.andAST(joinConditionAST, joinPredicate);
          subQueryJoinAliasExprs.add(sqExprForCorr);
          ASTNode selExpr = SubQueryUtils.createSelectItem(conjunct.getRightExpr(), sqExprAlias);
          selectClause.addChild(selExpr);
          subQueryDiagnostic.addSelectClauseRewrite(conjunct.getRightExpr(), exprAlias);
          numOfCorrelationExprsAddedToSQSelect++;
          if ( containsAggregationExprs ) {
            ASTNode gBy = getSubQueryGroupByAST();
            SubQueryUtils.addGroupExpressionToFront(gBy, conjunct.getRightExpr());
            subQueryDiagnostic.addGByClauseRewrite(conjunct.getRightExpr());
          }
          if ( notInCheck != null ) {
            notInCheck.addCorrExpr((ASTNode)conjunctAST.getChild(1));
          }
          subQueryDiagnostic.addJoinCondition(conjunctAST, corrCondLeftIsRewritten, corrCondRightIsRewritten);
        }
      } else {
        // Uncorrelated conjunct: keep it in the SubQuery's where clause.
        sqNewSearchCond = SubQueryUtils.andAST(sqNewSearchCond, conjunctAST);
        subQueryDiagnostic.addWhereClauseRewrite(conjunctAST);
      }
    }
    if ( sqNewSearchCond != searchCond ) {
      if ( sqNewSearchCond == null ) {
        /*
         * for now just adding a true condition(1=1) to where clause.
         * Can remove the where clause from the AST; requires moving all subsequent children
         * left.
         */
        sqNewSearchCond = SubQueryUtils.constructTrueCond();
        subQueryDiagnostic.addWhereClauseRewrite("1 = 1");
      }
      whereClause.setChild(0, sqNewSearchCond);
    }
  }
/*
* called if the SubQuery is Agg and Correlated.
* if SQ doesn't have a GroupBy, it is added to the SQ AST.
*/
  private ASTNode getSubQueryGroupByAST() {
    ASTNode groupBy = null;
    // Reuse an existing GROUP BY: child 1 of the SubQuery is the insert clause,
    // which carries the TOK_GROUPBY at index 3 when present.
    if ( subQueryAST.getChild(1).getChildCount() > 3 &&
        subQueryAST.getChild(1).getChild(3).getType() == HiveParser.TOK_GROUPBY ) {
      groupBy = (ASTNode) subQueryAST.getChild(1).getChild(3);
    }
    if ( groupBy != null ) {
      return groupBy;
    }
    // No GROUP BY yet: synthesize one and record that it was implicitly added
    // (validateAndRewriteAST rejects (Not) Exists on implied group-bys).
    groupBy = SubQueryUtils.buildGroupBy();
    groupByAddedToSQ = true;
    List<ASTNode> newChildren = new ArrayList<ASTNode>();
    newChildren.add(groupBy);
    // NOTE(review): children at index >= 3 are appended again via addChild
    // below without first being detached — verify the AST implementation
    // moves, rather than duplicates, a child that already has this parent.
    if ( subQueryAST.getChildCount() > 3) {
      for( int i = subQueryAST.getChildCount() - 1; i >= 3; i-- ) {
        ASTNode child = (ASTNode) subQueryAST.getChild(i);
        newChildren.add(child);
      }
    }
    for(ASTNode child : newChildren ) {
      subQueryAST.addChild(child);
    }
    subQueryDiagnostic.setAddGroupByClause();
    return groupBy;
  }
  /** Returns the id of the outer query this SubQuery belongs to. */
  public String getOuterQueryId() {
    return outerQueryId;
  }
  /** Returns the join type chosen by {@code setJoinType} (left outer or left semi). */
  public JoinType getJoinType() {
    return joinType;
  }
  /** Returns the generated alias ("sq_&lt;n&gt;") of this SubQuery. */
  public String getAlias() {
    return alias;
  }
  /** Returns the join condition AST accumulated from the correlated predicates. */
  public ASTNode getJoinConditionAST() {
    return joinConditionAST;
  }
  /** Returns how many correlation expressions were added to the SubQuery's select list. */
  public int getNumOfCorrelationExprsAddedToSQSelect() {
    return numOfCorrelationExprsAddedToSQSelect;
  }
  /** Returns the diagnostic object tracking the rewrites applied to this SubQuery. */
  public QBSubQueryRewrite getDiagnostic() {
    return subQueryDiagnostic;
  }
  /** Returns this SubQuery itself. */
  public QBSubQuery getSubQuery() {
    return this;
  }
  /** Returns the NOT IN null-check helper; null unless the operator is NOT IN. */
  NotInCheck getNotInCheck() {
    return notInCheck;
  }
private void rewriteCorrConjunctForHaving(ASTNode conjunctASTNode,
boolean refersLeft,
String outerQueryAlias,
RowResolver outerQueryRR,
ColumnInfo outerQueryCol) {
String newColAlias = "_gby_sq_col_" + numOuterCorrExprsForHaving++;
ASTNode outerExprForCorr = SubQueryUtils.createColRefAST(outerQueryAlias, newColAlias);
if ( refersLeft ) {
conjunctASTNode.setChild(0, outerExprForCorr);
} else {
conjunctASTNode.setChild(1, outerExprForCorr);
}
outerQueryRR.put(outerQueryAlias, newColAlias, outerQueryCol);
}
}
|
googleapis/google-cloud-java | 37,799 | java-aiplatform/google-cloud-aiplatform/src/test/java/com/google/cloud/aiplatform/v1/DeploymentResourcePoolServiceClientTest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.aiplatform.v1;
import static com.google.cloud.aiplatform.v1.DeploymentResourcePoolServiceClient.ListDeploymentResourcePoolsPagedResponse;
import static com.google.cloud.aiplatform.v1.DeploymentResourcePoolServiceClient.ListLocationsPagedResponse;
import static com.google.cloud.aiplatform.v1.DeploymentResourcePoolServiceClient.QueryDeployedModelsPagedResponse;
import com.google.api.gax.core.NoCredentialsProvider;
import com.google.api.gax.grpc.GaxGrpcProperties;
import com.google.api.gax.grpc.testing.LocalChannelProvider;
import com.google.api.gax.grpc.testing.MockGrpcService;
import com.google.api.gax.grpc.testing.MockServiceHelper;
import com.google.api.gax.rpc.ApiClientHeaderProvider;
import com.google.api.gax.rpc.InvalidArgumentException;
import com.google.api.gax.rpc.StatusCode;
import com.google.cloud.location.GetLocationRequest;
import com.google.cloud.location.ListLocationsRequest;
import com.google.cloud.location.ListLocationsResponse;
import com.google.cloud.location.Location;
import com.google.common.collect.Lists;
import com.google.iam.v1.AuditConfig;
import com.google.iam.v1.Binding;
import com.google.iam.v1.GetIamPolicyRequest;
import com.google.iam.v1.GetPolicyOptions;
import com.google.iam.v1.Policy;
import com.google.iam.v1.SetIamPolicyRequest;
import com.google.iam.v1.TestIamPermissionsRequest;
import com.google.iam.v1.TestIamPermissionsResponse;
import com.google.longrunning.Operation;
import com.google.protobuf.AbstractMessage;
import com.google.protobuf.Any;
import com.google.protobuf.ByteString;
import com.google.protobuf.Empty;
import com.google.protobuf.FieldMask;
import com.google.protobuf.Timestamp;
import io.grpc.StatusRuntimeException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.ExecutionException;
import javax.annotation.Generated;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
@Generated("by gapic-generator-java")
public class DeploymentResourcePoolServiceClientTest {
private static MockDeploymentResourcePoolService mockDeploymentResourcePoolService;
private static MockIAMPolicy mockIAMPolicy;
private static MockLocations mockLocations;
private static MockServiceHelper mockServiceHelper;
private LocalChannelProvider channelProvider;
private DeploymentResourcePoolServiceClient client;
  // Boots a single in-process gRPC server (random channel name) hosting all
  // three mock services; shared by every test in this class.
  @BeforeClass
  public static void startStaticServer() {
    mockDeploymentResourcePoolService = new MockDeploymentResourcePoolService();
    mockLocations = new MockLocations();
    mockIAMPolicy = new MockIAMPolicy();
    mockServiceHelper =
        new MockServiceHelper(
            UUID.randomUUID().toString(),
            Arrays.<MockGrpcService>asList(
                mockDeploymentResourcePoolService, mockLocations, mockIAMPolicy));
    mockServiceHelper.start();
  }
  // Tears the shared in-process server down once all tests have run.
  @AfterClass
  public static void stopServer() {
    mockServiceHelper.stop();
  }
  // Resets the mocks and creates a fresh client (no credentials, local channel)
  // before each test so tests cannot observe each other's requests/responses.
  @Before
  public void setUp() throws IOException {
    mockServiceHelper.reset();
    channelProvider = mockServiceHelper.createChannelProvider();
    DeploymentResourcePoolServiceSettings settings =
        DeploymentResourcePoolServiceSettings.newBuilder()
            .setTransportChannelProvider(channelProvider)
            .setCredentialsProvider(NoCredentialsProvider.create())
            .build();
    client = DeploymentResourcePoolServiceClient.create(settings);
  }
  // Closes the per-test client.
  @After
  public void tearDown() throws Exception {
    client.close();
  }
  // Happy path for createDeploymentResourcePoolAsync(LocationName, pool, id):
  // the stubbed LRO resolves to the expected pool and all three arguments
  // round-trip into the captured request.
  @Test
  public void createDeploymentResourcePoolTest() throws Exception {
    DeploymentResourcePool expectedResponse =
        DeploymentResourcePool.newBuilder()
            .setName(
                DeploymentResourcePoolName.of(
                        "[PROJECT]", "[LOCATION]", "[DEPLOYMENT_RESOURCE_POOL]")
                    .toString())
            .setDedicatedResources(DedicatedResources.newBuilder().build())
            .setEncryptionSpec(EncryptionSpec.newBuilder().build())
            .setServiceAccount("serviceAccount1079137720")
            .setDisableContainerLogging(true)
            .setCreateTime(Timestamp.newBuilder().build())
            .setSatisfiesPzs(true)
            .setSatisfiesPzi(true)
            .build();
    Operation resultOperation =
        Operation.newBuilder()
            .setName("createDeploymentResourcePoolTest")
            .setDone(true)
            .setResponse(Any.pack(expectedResponse))
            .build();
    mockDeploymentResourcePoolService.addResponse(resultOperation);
    LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
    DeploymentResourcePool deploymentResourcePool = DeploymentResourcePool.newBuilder().build();
    String deploymentResourcePoolId = "deploymentResourcePoolId1805697578";
    DeploymentResourcePool actualResponse =
        client
            .createDeploymentResourcePoolAsync(
                parent, deploymentResourcePool, deploymentResourcePoolId)
            .get();
    Assert.assertEquals(expectedResponse, actualResponse);
    List<AbstractMessage> actualRequests = mockDeploymentResourcePoolService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    CreateDeploymentResourcePoolRequest actualRequest =
        ((CreateDeploymentResourcePoolRequest) actualRequests.get(0));
    Assert.assertEquals(parent.toString(), actualRequest.getParent());
    Assert.assertEquals(deploymentResourcePool, actualRequest.getDeploymentResourcePool());
    Assert.assertEquals(deploymentResourcePoolId, actualRequest.getDeploymentResourcePoolId());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
  // Error path: a stubbed INVALID_ARGUMENT surfaces as an
  // InvalidArgumentException cause of the ExecutionException from the future.
  @Test
  public void createDeploymentResourcePoolExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockDeploymentResourcePoolService.addException(exception);
    try {
      LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
      DeploymentResourcePool deploymentResourcePool = DeploymentResourcePool.newBuilder().build();
      String deploymentResourcePoolId = "deploymentResourcePoolId1805697578";
      client
          .createDeploymentResourcePoolAsync(
              parent, deploymentResourcePool, deploymentResourcePoolId)
          .get();
      Assert.fail("No exception raised");
    } catch (ExecutionException e) {
      Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass());
      InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause());
      Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode());
    }
  }
  // Same as createDeploymentResourcePoolTest but exercising the String-parent
  // overload of createDeploymentResourcePoolAsync.
  @Test
  public void createDeploymentResourcePoolTest2() throws Exception {
    DeploymentResourcePool expectedResponse =
        DeploymentResourcePool.newBuilder()
            .setName(
                DeploymentResourcePoolName.of(
                        "[PROJECT]", "[LOCATION]", "[DEPLOYMENT_RESOURCE_POOL]")
                    .toString())
            .setDedicatedResources(DedicatedResources.newBuilder().build())
            .setEncryptionSpec(EncryptionSpec.newBuilder().build())
            .setServiceAccount("serviceAccount1079137720")
            .setDisableContainerLogging(true)
            .setCreateTime(Timestamp.newBuilder().build())
            .setSatisfiesPzs(true)
            .setSatisfiesPzi(true)
            .build();
    Operation resultOperation =
        Operation.newBuilder()
            .setName("createDeploymentResourcePoolTest")
            .setDone(true)
            .setResponse(Any.pack(expectedResponse))
            .build();
    mockDeploymentResourcePoolService.addResponse(resultOperation);
    String parent = "parent-995424086";
    DeploymentResourcePool deploymentResourcePool = DeploymentResourcePool.newBuilder().build();
    String deploymentResourcePoolId = "deploymentResourcePoolId1805697578";
    DeploymentResourcePool actualResponse =
        client
            .createDeploymentResourcePoolAsync(
                parent, deploymentResourcePool, deploymentResourcePoolId)
            .get();
    Assert.assertEquals(expectedResponse, actualResponse);
    List<AbstractMessage> actualRequests = mockDeploymentResourcePoolService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    CreateDeploymentResourcePoolRequest actualRequest =
        ((CreateDeploymentResourcePoolRequest) actualRequests.get(0));
    Assert.assertEquals(parent, actualRequest.getParent());
    Assert.assertEquals(deploymentResourcePool, actualRequest.getDeploymentResourcePool());
    Assert.assertEquals(deploymentResourcePoolId, actualRequest.getDeploymentResourcePoolId());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
  // String-parent error path: INVALID_ARGUMENT propagates through the LRO future.
  @Test
  public void createDeploymentResourcePoolExceptionTest2() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockDeploymentResourcePoolService.addException(exception);
    try {
      String parent = "parent-995424086";
      DeploymentResourcePool deploymentResourcePool = DeploymentResourcePool.newBuilder().build();
      String deploymentResourcePoolId = "deploymentResourcePoolId1805697578";
      client
          .createDeploymentResourcePoolAsync(
              parent, deploymentResourcePool, deploymentResourcePoolId)
          .get();
      Assert.fail("No exception raised");
    } catch (ExecutionException e) {
      Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass());
      InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause());
      Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode());
    }
  }
  // Happy path for getDeploymentResourcePool(DeploymentResourcePoolName):
  // stubbed pool is returned and the request name matches the resource name.
  @Test
  public void getDeploymentResourcePoolTest() throws Exception {
    DeploymentResourcePool expectedResponse =
        DeploymentResourcePool.newBuilder()
            .setName(
                DeploymentResourcePoolName.of(
                        "[PROJECT]", "[LOCATION]", "[DEPLOYMENT_RESOURCE_POOL]")
                    .toString())
            .setDedicatedResources(DedicatedResources.newBuilder().build())
            .setEncryptionSpec(EncryptionSpec.newBuilder().build())
            .setServiceAccount("serviceAccount1079137720")
            .setDisableContainerLogging(true)
            .setCreateTime(Timestamp.newBuilder().build())
            .setSatisfiesPzs(true)
            .setSatisfiesPzi(true)
            .build();
    mockDeploymentResourcePoolService.addResponse(expectedResponse);
    DeploymentResourcePoolName name =
        DeploymentResourcePoolName.of("[PROJECT]", "[LOCATION]", "[DEPLOYMENT_RESOURCE_POOL]");
    DeploymentResourcePool actualResponse = client.getDeploymentResourcePool(name);
    Assert.assertEquals(expectedResponse, actualResponse);
    List<AbstractMessage> actualRequests = mockDeploymentResourcePoolService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    GetDeploymentResourcePoolRequest actualRequest =
        ((GetDeploymentResourcePoolRequest) actualRequests.get(0));
    Assert.assertEquals(name.toString(), actualRequest.getName());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
  // Error path: unary call surfaces INVALID_ARGUMENT directly as InvalidArgumentException.
  @Test
  public void getDeploymentResourcePoolExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockDeploymentResourcePoolService.addException(exception);
    try {
      DeploymentResourcePoolName name =
          DeploymentResourcePoolName.of("[PROJECT]", "[LOCATION]", "[DEPLOYMENT_RESOURCE_POOL]");
      client.getDeploymentResourcePool(name);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
  // Same as getDeploymentResourcePoolTest but exercising the String-name overload.
  @Test
  public void getDeploymentResourcePoolTest2() throws Exception {
    DeploymentResourcePool expectedResponse =
        DeploymentResourcePool.newBuilder()
            .setName(
                DeploymentResourcePoolName.of(
                        "[PROJECT]", "[LOCATION]", "[DEPLOYMENT_RESOURCE_POOL]")
                    .toString())
            .setDedicatedResources(DedicatedResources.newBuilder().build())
            .setEncryptionSpec(EncryptionSpec.newBuilder().build())
            .setServiceAccount("serviceAccount1079137720")
            .setDisableContainerLogging(true)
            .setCreateTime(Timestamp.newBuilder().build())
            .setSatisfiesPzs(true)
            .setSatisfiesPzi(true)
            .build();
    mockDeploymentResourcePoolService.addResponse(expectedResponse);
    String name = "name3373707";
    DeploymentResourcePool actualResponse = client.getDeploymentResourcePool(name);
    Assert.assertEquals(expectedResponse, actualResponse);
    List<AbstractMessage> actualRequests = mockDeploymentResourcePoolService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    GetDeploymentResourcePoolRequest actualRequest =
        ((GetDeploymentResourcePoolRequest) actualRequests.get(0));
    Assert.assertEquals(name, actualRequest.getName());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
  // String-name error path: INVALID_ARGUMENT surfaces as InvalidArgumentException.
  @Test
  public void getDeploymentResourcePoolExceptionTest2() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockDeploymentResourcePoolService.addException(exception);
    try {
      String name = "name3373707";
      client.getDeploymentResourcePool(name);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
  // Happy path for listDeploymentResourcePools(ProjectName): a single-page
  // response is fully iterated and the parent round-trips into the request.
  @Test
  public void listDeploymentResourcePoolsTest() throws Exception {
    DeploymentResourcePool responsesElement = DeploymentResourcePool.newBuilder().build();
    ListDeploymentResourcePoolsResponse expectedResponse =
        ListDeploymentResourcePoolsResponse.newBuilder()
            .setNextPageToken("")
            .addAllDeploymentResourcePools(Arrays.asList(responsesElement))
            .build();
    mockDeploymentResourcePoolService.addResponse(expectedResponse);
    ProjectName parent = ProjectName.of("[PROJECT]");
    ListDeploymentResourcePoolsPagedResponse pagedListResponse =
        client.listDeploymentResourcePools(parent);
    List<DeploymentResourcePool> resources = Lists.newArrayList(pagedListResponse.iterateAll());
    Assert.assertEquals(1, resources.size());
    Assert.assertEquals(expectedResponse.getDeploymentResourcePoolsList().get(0), resources.get(0));
    List<AbstractMessage> actualRequests = mockDeploymentResourcePoolService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    ListDeploymentResourcePoolsRequest actualRequest =
        ((ListDeploymentResourcePoolsRequest) actualRequests.get(0));
    Assert.assertEquals(parent.toString(), actualRequest.getParent());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
  // Error path: INVALID_ARGUMENT surfaces as InvalidArgumentException.
  @Test
  public void listDeploymentResourcePoolsExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockDeploymentResourcePoolService.addException(exception);
    try {
      ProjectName parent = ProjectName.of("[PROJECT]");
      client.listDeploymentResourcePools(parent);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
  // Same as listDeploymentResourcePoolsTest but exercising the String-parent overload.
  @Test
  public void listDeploymentResourcePoolsTest2() throws Exception {
    DeploymentResourcePool responsesElement = DeploymentResourcePool.newBuilder().build();
    ListDeploymentResourcePoolsResponse expectedResponse =
        ListDeploymentResourcePoolsResponse.newBuilder()
            .setNextPageToken("")
            .addAllDeploymentResourcePools(Arrays.asList(responsesElement))
            .build();
    mockDeploymentResourcePoolService.addResponse(expectedResponse);
    String parent = "parent-995424086";
    ListDeploymentResourcePoolsPagedResponse pagedListResponse =
        client.listDeploymentResourcePools(parent);
    List<DeploymentResourcePool> resources = Lists.newArrayList(pagedListResponse.iterateAll());
    Assert.assertEquals(1, resources.size());
    Assert.assertEquals(expectedResponse.getDeploymentResourcePoolsList().get(0), resources.get(0));
    List<AbstractMessage> actualRequests = mockDeploymentResourcePoolService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    ListDeploymentResourcePoolsRequest actualRequest =
        ((ListDeploymentResourcePoolsRequest) actualRequests.get(0));
    Assert.assertEquals(parent, actualRequest.getParent());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
  // String-parent error path: INVALID_ARGUMENT surfaces as InvalidArgumentException.
  @Test
  public void listDeploymentResourcePoolsExceptionTest2() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockDeploymentResourcePoolService.addException(exception);
    try {
      String parent = "parent-995424086";
      client.listDeploymentResourcePools(parent);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
  // Long-running-operation happy path: the mock returns a completed Operation wrapping the
  // expected DeploymentResourcePool, and updateDeploymentResourcePoolAsync(...).get() must
  // unwrap it; also verifies the UpdateDeploymentResourcePoolRequest fields and API headers.
  @Test
  public void updateDeploymentResourcePoolTest() throws Exception {
    DeploymentResourcePool expectedResponse =
        DeploymentResourcePool.newBuilder()
            .setName(
                DeploymentResourcePoolName.of(
                        "[PROJECT]", "[LOCATION]", "[DEPLOYMENT_RESOURCE_POOL]")
                    .toString())
            .setDedicatedResources(DedicatedResources.newBuilder().build())
            .setEncryptionSpec(EncryptionSpec.newBuilder().build())
            .setServiceAccount("serviceAccount1079137720")
            .setDisableContainerLogging(true)
            .setCreateTime(Timestamp.newBuilder().build())
            .setSatisfiesPzs(true)
            .setSatisfiesPzi(true)
            .build();
    // setDone(true) makes the LRO resolve immediately so .get() does not poll.
    Operation resultOperation =
        Operation.newBuilder()
            .setName("updateDeploymentResourcePoolTest")
            .setDone(true)
            .setResponse(Any.pack(expectedResponse))
            .build();
    mockDeploymentResourcePoolService.addResponse(resultOperation);
    DeploymentResourcePool deploymentResourcePool = DeploymentResourcePool.newBuilder().build();
    FieldMask updateMask = FieldMask.newBuilder().build();
    DeploymentResourcePool actualResponse =
        client.updateDeploymentResourcePoolAsync(deploymentResourcePool, updateMask).get();
    Assert.assertEquals(expectedResponse, actualResponse);
    List<AbstractMessage> actualRequests = mockDeploymentResourcePoolService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    UpdateDeploymentResourcePoolRequest actualRequest =
        ((UpdateDeploymentResourcePoolRequest) actualRequests.get(0));
    Assert.assertEquals(deploymentResourcePool, actualRequest.getDeploymentResourcePool());
    Assert.assertEquals(updateMask, actualRequest.getUpdateMask());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
  // Error path for the async LRO method: the failure arrives wrapped in ExecutionException with
  // an InvalidArgumentException cause carrying the INVALID_ARGUMENT status code.
  @Test
  public void updateDeploymentResourcePoolExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockDeploymentResourcePoolService.addException(exception);
    try {
      DeploymentResourcePool deploymentResourcePool = DeploymentResourcePool.newBuilder().build();
      FieldMask updateMask = FieldMask.newBuilder().build();
      client.updateDeploymentResourcePoolAsync(deploymentResourcePool, updateMask).get();
      Assert.fail("No exception raised");
    } catch (ExecutionException e) {
      Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass());
      InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause());
      Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode());
    }
  }
  // LRO delete happy path with the DeploymentResourcePoolName overload: a completed Operation
  // wrapping Empty is returned, and the request's name field must be the stringified resource name.
  @Test
  public void deleteDeploymentResourcePoolTest() throws Exception {
    Empty expectedResponse = Empty.newBuilder().build();
    Operation resultOperation =
        Operation.newBuilder()
            .setName("deleteDeploymentResourcePoolTest")
            .setDone(true)
            .setResponse(Any.pack(expectedResponse))
            .build();
    mockDeploymentResourcePoolService.addResponse(resultOperation);
    DeploymentResourcePoolName name =
        DeploymentResourcePoolName.of("[PROJECT]", "[LOCATION]", "[DEPLOYMENT_RESOURCE_POOL]");
    client.deleteDeploymentResourcePoolAsync(name).get();
    List<AbstractMessage> actualRequests = mockDeploymentResourcePoolService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    DeleteDeploymentResourcePoolRequest actualRequest =
        ((DeleteDeploymentResourcePoolRequest) actualRequests.get(0));
    Assert.assertEquals(name.toString(), actualRequest.getName());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
  // Error path for the typed-name delete: INVALID_ARGUMENT surfaces via ExecutionException.
  @Test
  public void deleteDeploymentResourcePoolExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockDeploymentResourcePoolService.addException(exception);
    try {
      DeploymentResourcePoolName name =
          DeploymentResourcePoolName.of("[PROJECT]", "[LOCATION]", "[DEPLOYMENT_RESOURCE_POOL]");
      client.deleteDeploymentResourcePoolAsync(name).get();
      Assert.fail("No exception raised");
    } catch (ExecutionException e) {
      Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass());
      InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause());
      Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode());
    }
  }
  // Same delete happy path as deleteDeploymentResourcePoolTest but via the plain-String name
  // overload; the raw string is expected verbatim in request.name.
  @Test
  public void deleteDeploymentResourcePoolTest2() throws Exception {
    Empty expectedResponse = Empty.newBuilder().build();
    Operation resultOperation =
        Operation.newBuilder()
            .setName("deleteDeploymentResourcePoolTest")
            .setDone(true)
            .setResponse(Any.pack(expectedResponse))
            .build();
    mockDeploymentResourcePoolService.addResponse(resultOperation);
    String name = "name3373707";
    client.deleteDeploymentResourcePoolAsync(name).get();
    List<AbstractMessage> actualRequests = mockDeploymentResourcePoolService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    DeleteDeploymentResourcePoolRequest actualRequest =
        ((DeleteDeploymentResourcePoolRequest) actualRequests.get(0));
    Assert.assertEquals(name, actualRequest.getName());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
  // Error path for the String-name delete overload.
  @Test
  public void deleteDeploymentResourcePoolExceptionTest2() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockDeploymentResourcePoolService.addException(exception);
    try {
      String name = "name3373707";
      client.deleteDeploymentResourcePoolAsync(name).get();
      Assert.fail("No exception raised");
    } catch (ExecutionException e) {
      Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass());
      InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause());
      Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode());
    }
  }
  // Paged happy path for queryDeployedModels(String): single-page response, verifies the page
  // contents, the deployment_resource_pool field on the sent request, and the API headers.
  @Test
  public void queryDeployedModelsTest() throws Exception {
    DeployedModel responsesElement = DeployedModel.newBuilder().build();
    QueryDeployedModelsResponse expectedResponse =
        QueryDeployedModelsResponse.newBuilder()
            .setNextPageToken("")
            .addAllDeployedModels(Arrays.asList(responsesElement))
            .build();
    mockDeploymentResourcePoolService.addResponse(expectedResponse);
    String deploymentResourcePool = "deploymentResourcePool-1928845137";
    QueryDeployedModelsPagedResponse pagedListResponse =
        client.queryDeployedModels(deploymentResourcePool);
    List<DeployedModel> resources = Lists.newArrayList(pagedListResponse.iterateAll());
    Assert.assertEquals(1, resources.size());
    Assert.assertEquals(expectedResponse.getDeployedModelsList().get(0), resources.get(0));
    List<AbstractMessage> actualRequests = mockDeploymentResourcePoolService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    QueryDeployedModelsRequest actualRequest = ((QueryDeployedModelsRequest) actualRequests.get(0));
    Assert.assertEquals(deploymentResourcePool, actualRequest.getDeploymentResourcePool());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
  // Error path: INVALID_ARGUMENT from the mock maps to InvalidArgumentException.
  @Test
  public void queryDeployedModelsExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockDeploymentResourcePoolService.addException(exception);
    try {
      String deploymentResourcePool = "deploymentResourcePool-1928845137";
      client.queryDeployedModels(deploymentResourcePool);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
  // Mixin happy path for listLocations(request) against the Locations mock: verifies the paged
  // result, that every populated request field is forwarded unchanged, and the API headers.
  @Test
  public void listLocationsTest() throws Exception {
    Location responsesElement = Location.newBuilder().build();
    ListLocationsResponse expectedResponse =
        ListLocationsResponse.newBuilder()
            .setNextPageToken("")
            .addAllLocations(Arrays.asList(responsesElement))
            .build();
    mockLocations.addResponse(expectedResponse);
    ListLocationsRequest request =
        ListLocationsRequest.newBuilder()
            .setName("name3373707")
            .setFilter("filter-1274492040")
            .setPageSize(883849137)
            .setPageToken("pageToken873572522")
            .build();
    ListLocationsPagedResponse pagedListResponse = client.listLocations(request);
    List<Location> resources = Lists.newArrayList(pagedListResponse.iterateAll());
    Assert.assertEquals(1, resources.size());
    Assert.assertEquals(expectedResponse.getLocationsList().get(0), resources.get(0));
    List<AbstractMessage> actualRequests = mockLocations.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    ListLocationsRequest actualRequest = ((ListLocationsRequest) actualRequests.get(0));
    Assert.assertEquals(request.getName(), actualRequest.getName());
    Assert.assertEquals(request.getFilter(), actualRequest.getFilter());
    Assert.assertEquals(request.getPageSize(), actualRequest.getPageSize());
    Assert.assertEquals(request.getPageToken(), actualRequest.getPageToken());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
  // Error path for the Locations mixin: INVALID_ARGUMENT maps to InvalidArgumentException.
  @Test
  public void listLocationsExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockLocations.addException(exception);
    try {
      ListLocationsRequest request =
          ListLocationsRequest.newBuilder()
              .setName("name3373707")
              .setFilter("filter-1274492040")
              .setPageSize(883849137)
              .setPageToken("pageToken873572522")
              .build();
      client.listLocations(request);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
  // Unary mixin happy path for getLocation(request): the mock's canned Location must be returned
  // as-is and request.name forwarded unchanged.
  @Test
  public void getLocationTest() throws Exception {
    Location expectedResponse =
        Location.newBuilder()
            .setName("name3373707")
            .setLocationId("locationId1541836720")
            .setDisplayName("displayName1714148973")
            .putAllLabels(new HashMap<String, String>())
            .setMetadata(Any.newBuilder().build())
            .build();
    mockLocations.addResponse(expectedResponse);
    GetLocationRequest request = GetLocationRequest.newBuilder().setName("name3373707").build();
    Location actualResponse = client.getLocation(request);
    Assert.assertEquals(expectedResponse, actualResponse);
    List<AbstractMessage> actualRequests = mockLocations.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    GetLocationRequest actualRequest = ((GetLocationRequest) actualRequests.get(0));
    Assert.assertEquals(request.getName(), actualRequest.getName());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
  // Error path for getLocation: INVALID_ARGUMENT maps to InvalidArgumentException.
  @Test
  public void getLocationExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockLocations.addException(exception);
    try {
      GetLocationRequest request = GetLocationRequest.newBuilder().setName("name3373707").build();
      client.getLocation(request);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
  // IAM mixin happy path for setIamPolicy(request): verifies the returned Policy and that
  // resource, policy, and update_mask are all forwarded to the IAMPolicy mock unchanged.
  @Test
  public void setIamPolicyTest() throws Exception {
    Policy expectedResponse =
        Policy.newBuilder()
            .setVersion(351608024)
            .addAllBindings(new ArrayList<Binding>())
            .addAllAuditConfigs(new ArrayList<AuditConfig>())
            .setEtag(ByteString.EMPTY)
            .build();
    mockIAMPolicy.addResponse(expectedResponse);
    SetIamPolicyRequest request =
        SetIamPolicyRequest.newBuilder()
            .setResource(
                EndpointName.ofProjectLocationEndpointName("[PROJECT]", "[LOCATION]", "[ENDPOINT]")
                    .toString())
            .setPolicy(Policy.newBuilder().build())
            .setUpdateMask(FieldMask.newBuilder().build())
            .build();
    Policy actualResponse = client.setIamPolicy(request);
    Assert.assertEquals(expectedResponse, actualResponse);
    List<AbstractMessage> actualRequests = mockIAMPolicy.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    SetIamPolicyRequest actualRequest = ((SetIamPolicyRequest) actualRequests.get(0));
    Assert.assertEquals(request.getResource(), actualRequest.getResource());
    Assert.assertEquals(request.getPolicy(), actualRequest.getPolicy());
    Assert.assertEquals(request.getUpdateMask(), actualRequest.getUpdateMask());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
  // Error path for setIamPolicy: INVALID_ARGUMENT maps to InvalidArgumentException.
  @Test
  public void setIamPolicyExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockIAMPolicy.addException(exception);
    try {
      SetIamPolicyRequest request =
          SetIamPolicyRequest.newBuilder()
              .setResource(
                  EndpointName.ofProjectLocationEndpointName(
                          "[PROJECT]", "[LOCATION]", "[ENDPOINT]")
                      .toString())
              .setPolicy(Policy.newBuilder().build())
              .setUpdateMask(FieldMask.newBuilder().build())
              .build();
      client.setIamPolicy(request);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
  // IAM mixin happy path for getIamPolicy(request): verifies the returned Policy and that the
  // resource and options fields are forwarded to the IAMPolicy mock unchanged.
  @Test
  public void getIamPolicyTest() throws Exception {
    Policy expectedResponse =
        Policy.newBuilder()
            .setVersion(351608024)
            .addAllBindings(new ArrayList<Binding>())
            .addAllAuditConfigs(new ArrayList<AuditConfig>())
            .setEtag(ByteString.EMPTY)
            .build();
    mockIAMPolicy.addResponse(expectedResponse);
    GetIamPolicyRequest request =
        GetIamPolicyRequest.newBuilder()
            .setResource(
                EndpointName.ofProjectLocationEndpointName("[PROJECT]", "[LOCATION]", "[ENDPOINT]")
                    .toString())
            .setOptions(GetPolicyOptions.newBuilder().build())
            .build();
    Policy actualResponse = client.getIamPolicy(request);
    Assert.assertEquals(expectedResponse, actualResponse);
    List<AbstractMessage> actualRequests = mockIAMPolicy.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    GetIamPolicyRequest actualRequest = ((GetIamPolicyRequest) actualRequests.get(0));
    Assert.assertEquals(request.getResource(), actualRequest.getResource());
    Assert.assertEquals(request.getOptions(), actualRequest.getOptions());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
  // Error path for getIamPolicy: INVALID_ARGUMENT maps to InvalidArgumentException.
  @Test
  public void getIamPolicyExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockIAMPolicy.addException(exception);
    try {
      GetIamPolicyRequest request =
          GetIamPolicyRequest.newBuilder()
              .setResource(
                  EndpointName.ofProjectLocationEndpointName(
                          "[PROJECT]", "[LOCATION]", "[ENDPOINT]")
                      .toString())
              .setOptions(GetPolicyOptions.newBuilder().build())
              .build();
      client.getIamPolicy(request);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
  // IAM mixin happy path for testIamPermissions(request): verifies the canned response and that
  // the resource and permissions list are forwarded to the IAMPolicy mock unchanged.
  @Test
  public void testIamPermissionsTest() throws Exception {
    TestIamPermissionsResponse expectedResponse =
        TestIamPermissionsResponse.newBuilder().addAllPermissions(new ArrayList<String>()).build();
    mockIAMPolicy.addResponse(expectedResponse);
    TestIamPermissionsRequest request =
        TestIamPermissionsRequest.newBuilder()
            .setResource(
                EndpointName.ofProjectLocationEndpointName("[PROJECT]", "[LOCATION]", "[ENDPOINT]")
                    .toString())
            .addAllPermissions(new ArrayList<String>())
            .build();
    TestIamPermissionsResponse actualResponse = client.testIamPermissions(request);
    Assert.assertEquals(expectedResponse, actualResponse);
    List<AbstractMessage> actualRequests = mockIAMPolicy.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    TestIamPermissionsRequest actualRequest = ((TestIamPermissionsRequest) actualRequests.get(0));
    Assert.assertEquals(request.getResource(), actualRequest.getResource());
    Assert.assertEquals(request.getPermissionsList(), actualRequest.getPermissionsList());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
  // Error path for testIamPermissions: INVALID_ARGUMENT maps to InvalidArgumentException.
  @Test
  public void testIamPermissionsExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockIAMPolicy.addException(exception);
    try {
      TestIamPermissionsRequest request =
          TestIamPermissionsRequest.newBuilder()
              .setResource(
                  EndpointName.ofProjectLocationEndpointName(
                          "[PROJECT]", "[LOCATION]", "[ENDPOINT]")
                      .toString())
              .addAllPermissions(new ArrayList<String>())
              .build();
      client.testIamPermissions(request);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
}
|
googleapis/google-cloud-java | 37,734 | java-analytics-admin/proto-google-analytics-admin-v1alpha/src/main/java/com/google/analytics/admin/v1alpha/ListPropertiesResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/analytics/admin/v1alpha/analytics_admin.proto
// Protobuf Java Version: 3.25.8
package com.google.analytics.admin.v1alpha;
/**
*
*
* <pre>
* Response message for ListProperties RPC.
* </pre>
*
* Protobuf type {@code google.analytics.admin.v1alpha.ListPropertiesResponse}
*/
public final class ListPropertiesResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.analytics.admin.v1alpha.ListPropertiesResponse)
ListPropertiesResponseOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use ListPropertiesResponse.newBuilder() to construct.
  // Builder-based constructor; protobuf messages are only ever built via the Builder.
  private ListPropertiesResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // No-arg constructor used for the default instance; initializes fields to proto3 defaults.
  private ListPropertiesResponse() {
    properties_ = java.util.Collections.emptyList();
    nextPageToken_ = "";
  }
  @java.lang.Override
  @SuppressWarnings({"unused"})
  // Reflective instantiation hook used by the protobuf runtime (avoids Unsafe/serialization paths).
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ListPropertiesResponse();
  }
  // Returns the message descriptor generated from analytics_admin.proto.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.analytics.admin.v1alpha.AnalyticsAdminProto
        .internal_static_google_analytics_admin_v1alpha_ListPropertiesResponse_descriptor;
  }
  @java.lang.Override
  // Maps descriptor fields to this class's accessors for reflection-based field access.
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.analytics.admin.v1alpha.AnalyticsAdminProto
        .internal_static_google_analytics_admin_v1alpha_ListPropertiesResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.analytics.admin.v1alpha.ListPropertiesResponse.class,
            com.google.analytics.admin.v1alpha.ListPropertiesResponse.Builder.class);
  }
  public static final int PROPERTIES_FIELD_NUMBER = 1;
  @SuppressWarnings("serial")
  // Backing storage for `repeated Property properties = 1`; immutable once the message is built.
  private java.util.List<com.google.analytics.admin.v1alpha.Property> properties_;
  /**
   *
   *
   * <pre>
   * Results that matched the filter criteria and were accessible to the caller.
   * </pre>
   *
   * <code>repeated .google.analytics.admin.v1alpha.Property properties = 1;</code>
   */
  @java.lang.Override
  public java.util.List<com.google.analytics.admin.v1alpha.Property> getPropertiesList() {
    return properties_;
  }
  /**
   *
   *
   * <pre>
   * Results that matched the filter criteria and were accessible to the caller.
   * </pre>
   *
   * <code>repeated .google.analytics.admin.v1alpha.Property properties = 1;</code>
   */
  @java.lang.Override
  public java.util.List<? extends com.google.analytics.admin.v1alpha.PropertyOrBuilder>
      getPropertiesOrBuilderList() {
    return properties_;
  }
  /**
   *
   *
   * <pre>
   * Results that matched the filter criteria and were accessible to the caller.
   * </pre>
   *
   * <code>repeated .google.analytics.admin.v1alpha.Property properties = 1;</code>
   */
  @java.lang.Override
  public int getPropertiesCount() {
    return properties_.size();
  }
  /**
   *
   *
   * <pre>
   * Results that matched the filter criteria and were accessible to the caller.
   * </pre>
   *
   * <code>repeated .google.analytics.admin.v1alpha.Property properties = 1;</code>
   */
  @java.lang.Override
  public com.google.analytics.admin.v1alpha.Property getProperties(int index) {
    return properties_.get(index);
  }
  /**
   *
   *
   * <pre>
   * Results that matched the filter criteria and were accessible to the caller.
   * </pre>
   *
   * <code>repeated .google.analytics.admin.v1alpha.Property properties = 1;</code>
   */
  @java.lang.Override
  public com.google.analytics.admin.v1alpha.PropertyOrBuilder getPropertiesOrBuilder(int index) {
    return properties_.get(index);
  }
  public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
  @SuppressWarnings("serial")
  // Holds either a String or a ByteString; lazily converted and cached in whichever form is asked
  // for first (standard protobuf string-field representation).
  private volatile java.lang.Object nextPageToken_ = "";
  /**
   *
   *
   * <pre>
   * A token, which can be sent as `page_token` to retrieve the next page.
   * If this field is omitted, there are no subsequent pages.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The nextPageToken.
   */
  @java.lang.Override
  public java.lang.String getNextPageToken() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Decode the wire-format bytes once and cache the String for later calls.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      nextPageToken_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * A token, which can be sent as `page_token` to retrieve the next page.
   * If this field is omitted, there are no subsequent pages.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The bytes for nextPageToken.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getNextPageTokenBytes() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      // Encode the cached String to UTF-8 bytes once and cache the ByteString.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      nextPageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // Memoized initialization check: -1 = not computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // No required fields in proto3, so this message is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  // Serializes the message in field-number order; empty strings are omitted per proto3 rules.
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    for (int i = 0; i < properties_.size(); i++) {
      output.writeMessage(1, properties_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  // Computes and memoizes the wire size; must mirror writeTo exactly field-for-field.
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    for (int i = 0; i < properties_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, properties_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  // Field-by-field structural equality, including unknown fields carried from parsing.
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.analytics.admin.v1alpha.ListPropertiesResponse)) {
      return super.equals(obj);
    }
    com.google.analytics.admin.v1alpha.ListPropertiesResponse other =
        (com.google.analytics.admin.v1alpha.ListPropertiesResponse) obj;
    if (!getPropertiesList().equals(other.getPropertiesList())) return false;
    if (!getNextPageToken().equals(other.getNextPageToken())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  // Memoized hash consistent with equals(); folds in field numbers so field order matters.
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    // Repeated field only contributes when non-empty, matching equals() on empty lists.
    if (getPropertiesCount() > 0) {
      hash = (37 * hash) + PROPERTIES_FIELD_NUMBER;
      hash = (53 * hash) + getPropertiesList().hashCode();
    }
    hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getNextPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points: one overload per input source (ByteBuffer, ByteString,
  // byte[], InputStream, CodedInputStream), each with and without an ExtensionRegistry, plus the
  // length-delimited variants. All delegate to PARSER / GeneratedMessageV3 helpers.
  public static com.google.analytics.admin.v1alpha.ListPropertiesResponse parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.analytics.admin.v1alpha.ListPropertiesResponse parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.analytics.admin.v1alpha.ListPropertiesResponse parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.analytics.admin.v1alpha.ListPropertiesResponse parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.analytics.admin.v1alpha.ListPropertiesResponse parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.analytics.admin.v1alpha.ListPropertiesResponse parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.analytics.admin.v1alpha.ListPropertiesResponse parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.analytics.admin.v1alpha.ListPropertiesResponse parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Length-delimited variants: read a varint size prefix before the message body.
  public static com.google.analytics.admin.v1alpha.ListPropertiesResponse parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.analytics.admin.v1alpha.ListPropertiesResponse parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.analytics.admin.v1alpha.ListPropertiesResponse parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.analytics.admin.v1alpha.ListPropertiesResponse parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  // Fresh builder with all fields at their defaults.
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  // Builder pre-populated from an existing message.
  public static Builder newBuilder(
      com.google.analytics.admin.v1alpha.ListPropertiesResponse prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // The default instance yields an empty builder; any other instance is copied via mergeFrom.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  // Runtime hook: builder wired to a parent for invalidation callbacks on nested builders.
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Response message for ListProperties RPC.
* </pre>
*
* Protobuf type {@code google.analytics.admin.v1alpha.ListPropertiesResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.analytics.admin.v1alpha.ListPropertiesResponse)
com.google.analytics.admin.v1alpha.ListPropertiesResponseOrBuilder {
    // Same descriptor as the message class; required on the Builder for reflection support.
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.analytics.admin.v1alpha.AnalyticsAdminProto
          .internal_static_google_analytics_admin_v1alpha_ListPropertiesResponse_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.analytics.admin.v1alpha.AnalyticsAdminProto
          .internal_static_google_analytics_admin_v1alpha_ListPropertiesResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.analytics.admin.v1alpha.ListPropertiesResponse.class,
              com.google.analytics.admin.v1alpha.ListPropertiesResponse.Builder.class);
    }
    // Construct using com.google.analytics.admin.v1alpha.ListPropertiesResponse.newBuilder()
    private Builder() {}
    // Parent-aware constructor used by the runtime for nested-builder invalidation.
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    @java.lang.Override
    // Resets every field to its default and clears the has-bits in bitField0_.
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (propertiesBuilder_ == null) {
        // Plain-list mode: drop back to the shared immutable empty list.
        properties_ = java.util.Collections.emptyList();
      } else {
        // Nested-builder mode: the sub-builder owns the elements.
        properties_ = null;
        propertiesBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      nextPageToken_ = "";
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.analytics.admin.v1alpha.AnalyticsAdminProto
          .internal_static_google_analytics_admin_v1alpha_ListPropertiesResponse_descriptor;
    }
    @java.lang.Override
    // The immutable singleton with all fields at proto3 defaults.
    public com.google.analytics.admin.v1alpha.ListPropertiesResponse getDefaultInstanceForType() {
      return com.google.analytics.admin.v1alpha.ListPropertiesResponse.getDefaultInstance();
    }
    @java.lang.Override
    // Like buildPartial but throws if the result is not initialized (always is, in proto3).
    public com.google.analytics.admin.v1alpha.ListPropertiesResponse build() {
      com.google.analytics.admin.v1alpha.ListPropertiesResponse result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    // Copies builder state into a new immutable message; repeated fields first, then scalars
    // guarded by the has-bits.
    public com.google.analytics.admin.v1alpha.ListPropertiesResponse buildPartial() {
      com.google.analytics.admin.v1alpha.ListPropertiesResponse result =
          new com.google.analytics.admin.v1alpha.ListPropertiesResponse(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
private void buildPartialRepeatedFields(
com.google.analytics.admin.v1alpha.ListPropertiesResponse result) {
if (propertiesBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
properties_ = java.util.Collections.unmodifiableList(properties_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.properties_ = properties_;
} else {
result.properties_ = propertiesBuilder_.build();
}
}
private void buildPartial0(com.google.analytics.admin.v1alpha.ListPropertiesResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
// The following overrides delegate straight to GeneratedMessageV3.Builder;
// they are emitted by protoc so generated code stays source-compatible across
// protobuf runtime versions.
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
// Dynamic-dispatch merge: routes to the typed overload when possible, otherwise
// falls back to the reflection-based merge in the superclass.
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.analytics.admin.v1alpha.ListPropertiesResponse) {
return mergeFrom((com.google.analytics.admin.v1alpha.ListPropertiesResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
// Typed merge. Repeated 'properties' are appended; when this builder's list is
// still empty the other message's (immutable) list is adopted by reference and
// the mutability bit cleared, deferring the copy until the next mutation.
public Builder mergeFrom(com.google.analytics.admin.v1alpha.ListPropertiesResponse other) {
if (other == com.google.analytics.admin.v1alpha.ListPropertiesResponse.getDefaultInstance())
return this;
if (propertiesBuilder_ == null) {
if (!other.properties_.isEmpty()) {
if (properties_.isEmpty()) {
properties_ = other.properties_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensurePropertiesIsMutable();
properties_.addAll(other.properties_);
}
onChanged();
}
} else {
if (!other.properties_.isEmpty()) {
if (propertiesBuilder_.isEmpty()) {
// Field builder is empty: drop it, adopt the other list, and recreate the
// builder only if the runtime forces field builders to always be used.
propertiesBuilder_.dispose();
propertiesBuilder_ = null;
properties_ = other.properties_;
bitField0_ = (bitField0_ & ~0x00000001);
propertiesBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getPropertiesFieldBuilder()
: null;
} else {
propertiesBuilder_.addAllMessages(other.properties_);
}
}
}
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
// No required fields in this message, so any instance is initialized.
@java.lang.Override
public final boolean isInitialized() {
return true;
}
// Wire-format merge: reads tag/value pairs until EOF (tag 0) or an end-group
// tag; field 1 (tag 10) is a Property message, field 2 (tag 18) a UTF-8 string.
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.analytics.admin.v1alpha.Property m =
input.readMessage(
com.google.analytics.admin.v1alpha.Property.parser(), extensionRegistry);
if (propertiesBuilder_ == null) {
ensurePropertiesIsMutable();
properties_.add(m);
} else {
propertiesBuilder_.addMessage(m);
}
break;
} // case 10
case 18:
{
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
// Notify parents even on failure so partially-read data is observed.
onChanged();
} // finally
return this;
}
// ---- Builder state ----
// Presence bits: 0x1 = 'properties_' list is privately owned and mutable,
// 0x2 = next_page_token has been explicitly set.
private int bitField0_;
private java.util.List<com.google.analytics.admin.v1alpha.Property> properties_ =
java.util.Collections.emptyList();
// Copy-on-write guard: replaces a shared/immutable list with a private
// ArrayList before the first mutation, then marks it mutable via bit 0x1.
private void ensurePropertiesIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
properties_ =
new java.util.ArrayList<com.google.analytics.admin.v1alpha.Property>(properties_);
bitField0_ |= 0x00000001;
}
}
// Lazily-created field builder; once non-null it owns the repeated field and
// 'properties_' is set to null (exactly one representation is live at a time).
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.analytics.admin.v1alpha.Property,
com.google.analytics.admin.v1alpha.Property.Builder,
com.google.analytics.admin.v1alpha.PropertyOrBuilder>
propertiesBuilder_;
/**
*
*
* <pre>
* Results that matched the filter criteria and were accessible to the caller.
* </pre>
*
* <code>repeated .google.analytics.admin.v1alpha.Property properties = 1;</code>
*
* @return an unmodifiable view of the current properties
*/
public java.util.List<com.google.analytics.admin.v1alpha.Property> getPropertiesList() {
if (propertiesBuilder_ == null) {
return java.util.Collections.unmodifiableList(properties_);
} else {
return propertiesBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* Results that matched the filter criteria and were accessible to the caller.
* </pre>
*
* <code>repeated .google.analytics.admin.v1alpha.Property properties = 1;</code>
*
* @return the number of properties currently in the builder
*/
public int getPropertiesCount() {
if (propertiesBuilder_ == null) {
return properties_.size();
} else {
return propertiesBuilder_.getCount();
}
}
/**
*
*
* <pre>
* Results that matched the filter criteria and were accessible to the caller.
* </pre>
*
* <code>repeated .google.analytics.admin.v1alpha.Property properties = 1;</code>
*
* @param index zero-based position in the repeated field
*/
public com.google.analytics.admin.v1alpha.Property getProperties(int index) {
if (propertiesBuilder_ == null) {
return properties_.get(index);
} else {
return propertiesBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* Results that matched the filter criteria and were accessible to the caller.
* </pre>
*
* <code>repeated .google.analytics.admin.v1alpha.Property properties = 1;</code>
*
* @throws NullPointerException if {@code value} is null
*/
public Builder setProperties(int index, com.google.analytics.admin.v1alpha.Property value) {
if (propertiesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensurePropertiesIsMutable();
properties_.set(index, value);
onChanged();
} else {
propertiesBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* Results that matched the filter criteria and were accessible to the caller.
* </pre>
*
* <code>repeated .google.analytics.admin.v1alpha.Property properties = 1;</code>
*/
public Builder setProperties(
int index, com.google.analytics.admin.v1alpha.Property.Builder builderForValue) {
if (propertiesBuilder_ == null) {
ensurePropertiesIsMutable();
properties_.set(index, builderForValue.build());
onChanged();
} else {
propertiesBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Results that matched the filter criteria and were accessible to the caller.
* </pre>
*
* <code>repeated .google.analytics.admin.v1alpha.Property properties = 1;</code>
*
* @throws NullPointerException if {@code value} is null
*/
public Builder addProperties(com.google.analytics.admin.v1alpha.Property value) {
if (propertiesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensurePropertiesIsMutable();
properties_.add(value);
onChanged();
} else {
propertiesBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* Results that matched the filter criteria and were accessible to the caller.
* </pre>
*
* <code>repeated .google.analytics.admin.v1alpha.Property properties = 1;</code>
*
* @throws NullPointerException if {@code value} is null
*/
public Builder addProperties(int index, com.google.analytics.admin.v1alpha.Property value) {
if (propertiesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensurePropertiesIsMutable();
properties_.add(index, value);
onChanged();
} else {
propertiesBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* Results that matched the filter criteria and were accessible to the caller.
* </pre>
*
* <code>repeated .google.analytics.admin.v1alpha.Property properties = 1;</code>
*/
public Builder addProperties(
com.google.analytics.admin.v1alpha.Property.Builder builderForValue) {
if (propertiesBuilder_ == null) {
ensurePropertiesIsMutable();
properties_.add(builderForValue.build());
onChanged();
} else {
propertiesBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Results that matched the filter criteria and were accessible to the caller.
* </pre>
*
* <code>repeated .google.analytics.admin.v1alpha.Property properties = 1;</code>
*/
public Builder addProperties(
int index, com.google.analytics.admin.v1alpha.Property.Builder builderForValue) {
if (propertiesBuilder_ == null) {
ensurePropertiesIsMutable();
properties_.add(index, builderForValue.build());
onChanged();
} else {
propertiesBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Results that matched the filter criteria and were accessible to the caller.
* </pre>
*
* <code>repeated .google.analytics.admin.v1alpha.Property properties = 1;</code>
*/
public Builder addAllProperties(
java.lang.Iterable<? extends com.google.analytics.admin.v1alpha.Property> values) {
if (propertiesBuilder_ == null) {
ensurePropertiesIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, properties_);
onChanged();
} else {
propertiesBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* Results that matched the filter criteria and were accessible to the caller.
* </pre>
*
* <code>repeated .google.analytics.admin.v1alpha.Property properties = 1;</code>
*/
public Builder clearProperties() {
if (propertiesBuilder_ == null) {
properties_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
propertiesBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* Results that matched the filter criteria and were accessible to the caller.
* </pre>
*
* <code>repeated .google.analytics.admin.v1alpha.Property properties = 1;</code>
*/
public Builder removeProperties(int index) {
if (propertiesBuilder_ == null) {
ensurePropertiesIsMutable();
properties_.remove(index);
onChanged();
} else {
propertiesBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* Results that matched the filter criteria and were accessible to the caller.
* </pre>
*
* <code>repeated .google.analytics.admin.v1alpha.Property properties = 1;</code>
*
* @return a live sub-builder; forces creation of the field builder
*/
public com.google.analytics.admin.v1alpha.Property.Builder getPropertiesBuilder(int index) {
return getPropertiesFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* Results that matched the filter criteria and were accessible to the caller.
* </pre>
*
* <code>repeated .google.analytics.admin.v1alpha.Property properties = 1;</code>
*/
public com.google.analytics.admin.v1alpha.PropertyOrBuilder getPropertiesOrBuilder(int index) {
if (propertiesBuilder_ == null) {
return properties_.get(index);
} else {
return propertiesBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* Results that matched the filter criteria and were accessible to the caller.
* </pre>
*
* <code>repeated .google.analytics.admin.v1alpha.Property properties = 1;</code>
*/
public java.util.List<? extends com.google.analytics.admin.v1alpha.PropertyOrBuilder>
getPropertiesOrBuilderList() {
if (propertiesBuilder_ != null) {
return propertiesBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(properties_);
}
}
/**
*
*
* <pre>
* Results that matched the filter criteria and were accessible to the caller.
* </pre>
*
* <code>repeated .google.analytics.admin.v1alpha.Property properties = 1;</code>
*/
public com.google.analytics.admin.v1alpha.Property.Builder addPropertiesBuilder() {
return getPropertiesFieldBuilder()
.addBuilder(com.google.analytics.admin.v1alpha.Property.getDefaultInstance());
}
/**
*
*
* <pre>
* Results that matched the filter criteria and were accessible to the caller.
* </pre>
*
* <code>repeated .google.analytics.admin.v1alpha.Property properties = 1;</code>
*/
public com.google.analytics.admin.v1alpha.Property.Builder addPropertiesBuilder(int index) {
return getPropertiesFieldBuilder()
.addBuilder(index, com.google.analytics.admin.v1alpha.Property.getDefaultInstance());
}
/**
*
*
* <pre>
* Results that matched the filter criteria and were accessible to the caller.
* </pre>
*
* <code>repeated .google.analytics.admin.v1alpha.Property properties = 1;</code>
*/
public java.util.List<com.google.analytics.admin.v1alpha.Property.Builder>
getPropertiesBuilderList() {
return getPropertiesFieldBuilder().getBuilderList();
}
// Lazily switches from list-backed to builder-backed representation; the list
// reference is nulled out so only one representation is ever authoritative.
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.analytics.admin.v1alpha.Property,
com.google.analytics.admin.v1alpha.Property.Builder,
com.google.analytics.admin.v1alpha.PropertyOrBuilder>
getPropertiesFieldBuilder() {
if (propertiesBuilder_ == null) {
propertiesBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.analytics.admin.v1alpha.Property,
com.google.analytics.admin.v1alpha.Property.Builder,
com.google.analytics.admin.v1alpha.PropertyOrBuilder>(
properties_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
properties_ = null;
}
return propertiesBuilder_;
}
// Holds either a String or a ByteString; the getters below lazily convert and
// cache the requested representation (standard protobuf string-field pattern).
private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
// Cache the decoded String so subsequent calls skip UTF-8 decoding.
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
// Cache the encoded ByteString for subsequent calls.
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
* @throws NullPointerException if {@code value} is null
*/
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
* @throws NullPointerException if {@code value} is null
*/
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
// Final delegating overrides for unknown-field handling (sealed by protoc so
// subclasses of generated builders cannot alter unknown-field semantics).
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.analytics.admin.v1alpha.ListPropertiesResponse)
}
// @@protoc_insertion_point(class_scope:google.analytics.admin.v1alpha.ListPropertiesResponse)
// Singleton default instance, created eagerly in a static initializer.
private static final com.google.analytics.admin.v1alpha.ListPropertiesResponse DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.analytics.admin.v1alpha.ListPropertiesResponse();
}
public static com.google.analytics.admin.v1alpha.ListPropertiesResponse getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Parser that delegates to Builder.mergeFrom, attaching the partially-built
// message to any parse failure so callers can inspect what was read.
private static final com.google.protobuf.Parser<ListPropertiesResponse> PARSER =
new com.google.protobuf.AbstractParser<ListPropertiesResponse>() {
@java.lang.Override
public ListPropertiesResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ListPropertiesResponse> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListPropertiesResponse> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.analytics.admin.v1alpha.ListPropertiesResponse getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
package org.apache.helix.zookeeper.impl.client;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.lang.management.ManagementFactory;
import javax.management.JMException;
import javax.management.MBeanServer;
import javax.management.MalformedObjectNameException;
import javax.management.ObjectName;
import org.apache.helix.zookeeper.datamodel.ZNRecord;
import org.apache.helix.zookeeper.datamodel.serializer.ZNRecordSerializer;
import org.apache.helix.zookeeper.impl.ZkTestBase;
import org.apache.helix.zookeeper.zkclient.callback.ZkAsyncCallMonitorContext;
import org.apache.helix.zookeeper.zkclient.callback.ZkAsyncCallbacks;
import org.apache.helix.zookeeper.zkclient.callback.ZkAsyncRetryCallContext;
import org.apache.helix.zookeeper.zkclient.exception.ZkException;
import org.apache.helix.zookeeper.zkclient.exception.ZkInterruptedException;
import org.apache.helix.zookeeper.zkclient.metric.ZkClientMonitor;
import org.apache.helix.zookeeper.zkclient.metric.ZkClientPathMonitor;
import org.apache.zookeeper.CreateMode;
import org.apache.zookeeper.KeeperException;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import static org.apache.zookeeper.KeeperException.Code.CONNECTIONLOSS;
/**
* Note this is a whitebox test to test the async operation callback/context.
* We don't have a good way to simulate an async ZK operation failure in the server side yet.
*/
public class TestZkClientAsyncRetry extends ZkTestBase {
// Root ZNode under which all nodes created by this test live; named after the class.
private final String TEST_ROOT = String.format("/%s", getClass().getSimpleName());
// Path of the node exercised by the async create/set/delete operations.
private final String NODE_PATH = TEST_ROOT + "/async";
// Retry wait time is set to 3 times of the retry interval so as to leave extra buffer in case
// the test environment is slow. Extra wait time won't impact the test logic.
private final long RETRY_OPS_WAIT_TIMEOUT_MS = 3 * MockAsyncZkClient.RETRY_INTERVAL_MS;
// JMX identity pieces used to register/look up the ZkClientMonitor beans.
final String TEST_TAG = "test_tag";
final String TEST_KEY = "test_key";
final String TEST_INSTANCE = "test_instance";
private org.apache.helix.zookeeper.zkclient.ZkClient _zkClient;
private String _zkServerAddress;
private final MBeanServer _beanServer = ManagementFactory.getPlatformMBeanServer();
private ZkClientMonitor _monitor;
// ObjectName of the Root path monitor bean queried for failure counters.
ObjectName _rootName;
// Running totals of expected read/write async failures, compared against JMX counters.
int _readFailures;
int _writeFailures;
@BeforeClass
public void beforeClass() throws JMException {
// Reuse the shared embedded ZK server from ZkTestBase and create the test root.
_zkClient = _zkServerMap.values().iterator().next().getZkClient();
_zkServerAddress = _zkClient.getServers();
_zkClient.createPersistent(TEST_ROOT);
// Register the client monitor so the failure-counter MBeans are queryable.
_monitor = new ZkClientMonitor(TEST_TAG, TEST_KEY, TEST_INSTANCE, false, null);
_monitor.register();
_rootName = buildPathMonitorObjectName(TEST_TAG, TEST_KEY, TEST_INSTANCE,
ZkClientPathMonitor.PredefinedPath.Root.name());
_readFailures = 0;
_writeFailures = 0;
}
@AfterClass
public void afterClass() {
// Unregister the MBeans first, then remove all test ZNodes and release the client.
_monitor.unregister();
_zkClient.deleteRecursively(TEST_ROOT);
_zkClient.close();
}
/**
 * Returns true if the given ZooKeeper return code represents a transient
 * connectivity problem that the async retry logic is expected to retry:
 * CONNECTIONLOSS, SESSIONEXPIRED, or SESSIONMOVED.
 *
 * <p>Note: {@code KeeperException.Code.get(rc)} returns {@code null} for an
 * unrecognized rc value; the original switch would have thrown an NPE in that
 * case. Unknown codes are now treated as non-retryable instead.
 */
private boolean needRetry(int rc) {
KeeperException.Code code = KeeperException.Code.get(rc);
return code == KeeperException.Code.CONNECTIONLOSS
    || code == KeeperException.Code.SESSIONEXPIRED
    || code == KeeperException.Code.SESSIONMOVED;
}
/**
 * Waits up to {@code timeout} ms for the async callback to report success.
 *
 * <p>The blocking {@code callback.waitForSuccess()} call is run on a helper
 * thread so this method can bound the wait with {@code join(timeout)}; the
 * helper is always interrupted afterwards so it does not linger if the
 * callback never completes.
 *
 * @param callback the async callback to wait on
 * @param timeout  maximum time to wait, in milliseconds
 * @return true if the callback reported success within the timeout
 */
private boolean waitAsyncOperation(ZkAsyncCallbacks.DefaultCallback callback, long timeout) {
final boolean[] ret = { false };
Thread waitThread = new Thread(() -> ret[0] = callback.waitForSuccess());
waitThread.start();
try {
  waitThread.join(timeout);
  return ret[0];
} catch (InterruptedException e) {
  // Restore the interrupt status instead of swallowing it (the original
  // returned false without re-interrupting, losing the interrupt).
  Thread.currentThread().interrupt();
  return false;
} finally {
  // Stop the helper whether or not it finished; previously the interrupted
  // path leaked the still-blocked helper thread.
  waitThread.interrupt();
}
}
// Builds the JMX ObjectName for the top-level ZkClientMonitor bean identified
// by the given tag/key/instance triple.
private ObjectName buildObjectName(String tag, String key, String instance)
throws MalformedObjectNameException {
return ZkClientMonitor.getObjectName(tag, key, instance);
}
/**
 * Builds the JMX ObjectName of the per-path monitor bean by appending the
 * {@code PATH=<path>} key/value pair to the base client-monitor ObjectName.
 */
private ObjectName buildPathMonitorObjectName(String tag, String key, String instance,
String path) throws MalformedObjectNameException {
String baseName = buildObjectName(tag, key, instance).toString();
String pathMonitorName =
    String.format("%s,%s=%s", baseName, ZkClientPathMonitor.MONITOR_PATH, path);
return new ObjectName(pathMonitorName);
}
// Whitebox test: for every possible ZK error code, verifies that async create
// either retries (connectivity errors) until the mocked rc is flipped to OK, or
// fails immediately (all other codes) and bumps the WriteAsyncFailureCounter.
@Test
public void testAsyncRetryCategories() throws JMException {
MockAsyncZkClient testZkClient = new MockAsyncZkClient(_zkServerAddress);
try {
// NOTE(review): tmpRecord is populated but never passed to asyncCreate below
// (null data is used) — appears vestigial; confirm before removing.
ZNRecord tmpRecord = new ZNRecord("tmpRecord");
tmpRecord.setSimpleField("foo", "bar");
// Loop all possible error codes to test async create.
// Only connectivity issues will be retried, the other issues will be return error immediately.
for (KeeperException.Code code : KeeperException.Code.values()) {
if (code == KeeperException.Code.OK) {
continue;
}
ZkAsyncCallbacks.CreateCallbackHandler createCallback =
new ZkAsyncCallbacks.CreateCallbackHandler();
// A fresh callback starts with APIERROR as its rc sentinel.
Assert.assertEquals(createCallback.getRc(), KeeperException.Code.APIERROR.intValue());
testZkClient.setAsyncCallRC(code.intValue());
if (code == CONNECTIONLOSS || code == KeeperException.Code.SESSIONEXPIRED
|| code == KeeperException.Code.SESSIONMOVED) {
// Async create will be pending due to the mock error rc is retryable.
testZkClient.asyncCreate(NODE_PATH, null, CreateMode.PERSISTENT, createCallback);
Assert.assertFalse(createCallback.isOperationDone());
Assert.assertEquals(createCallback.getRc(), code.intValue());
// Change the mock response
testZkClient.setAsyncCallRC(KeeperException.Code.OK.intValue());
// Async retry will succeed now. Wait until the operation is successfully done and verify.
Assert.assertTrue(waitAsyncOperation(createCallback, RETRY_OPS_WAIT_TIMEOUT_MS));
Assert.assertEquals(createCallback.getRc(), KeeperException.Code.OK.intValue());
Assert.assertTrue(testZkClient.exists(NODE_PATH));
Assert.assertTrue(testZkClient.getAndResetRetryCount() >= 1);
} else {
// Async create will fail due to the mock error rc is not recoverable.
testZkClient.asyncCreate(NODE_PATH, null, CreateMode.PERSISTENT, createCallback);
Assert.assertTrue(waitAsyncOperation(createCallback, RETRY_OPS_WAIT_TIMEOUT_MS));
Assert.assertEquals(createCallback.getRc(), code.intValue());
Assert.assertEquals(testZkClient.getAndResetRetryCount(), 0);
++_writeFailures;
}
// The JMX write-failure counter must track our expected failure tally exactly.
Assert.assertEquals((long) _beanServer.getAttribute(_rootName,
ZkClientPathMonitor.PredefinedMetricDomains.WriteAsyncFailureCounter.toString()),
_writeFailures);
// Clean up the node so the next error-code iteration starts fresh.
testZkClient.delete(NODE_PATH);
Assert.assertFalse(testZkClient.exists(NODE_PATH));
}
} finally {
// Always restore the mock rc and release resources, even on assertion failure.
testZkClient.setAsyncCallRC(KeeperException.Code.OK.intValue());
testZkClient.close();
_zkClient.delete(NODE_PATH);
}
}
// Verifies async setData and delete both stay pending while the mocked rc is
// CONNECTIONLOSS, then complete successfully once the rc is flipped to OK,
// without incrementing the write-failure metric.
@Test(dependsOnMethods = "testAsyncRetryCategories")
public void testAsyncWriteRetry() throws JMException {
MockAsyncZkClient testZkClient = new MockAsyncZkClient(_zkServerAddress);
try {
ZNRecord tmpRecord = new ZNRecord("tmpRecord");
tmpRecord.setSimpleField("foo", "bar");
testZkClient.createPersistent(NODE_PATH, tmpRecord);
// 1. Test async set retry
ZkAsyncCallbacks.SetDataCallbackHandler setCallback =
new ZkAsyncCallbacks.SetDataCallbackHandler();
// Fresh callbacks report APIERROR until an operation completes.
Assert.assertEquals(setCallback.getRc(), KeeperException.Code.APIERROR.intValue());
tmpRecord.setSimpleField("test", "data");
testZkClient.setAsyncCallRC(CONNECTIONLOSS.intValue());
// Async set will be pending due to the mock error rc is retryable.
testZkClient.asyncSetData(NODE_PATH, tmpRecord, -1, setCallback);
Assert.assertFalse(setCallback.isOperationDone());
Assert.assertEquals(setCallback.getRc(), CONNECTIONLOSS.intValue());
// Change the mock return code.
testZkClient.setAsyncCallRC(KeeperException.Code.OK.intValue());
// Async retry will succeed now. Wait until the operation is successfully done and verify.
Assert.assertTrue(waitAsyncOperation(setCallback, RETRY_OPS_WAIT_TIMEOUT_MS));
Assert.assertEquals(setCallback.getRc(), KeeperException.Code.OK.intValue());
// Confirm the retried write actually landed in ZK.
Assert.assertEquals(((ZNRecord) testZkClient.readData(NODE_PATH)).getSimpleField("test"),
"data");
Assert.assertTrue(testZkClient.getAndResetRetryCount() >= 1);
// Check failure metric, which should be unchanged because the operation succeeded
Assert.assertEquals((long) _beanServer.getAttribute(_rootName,
ZkClientPathMonitor.PredefinedMetricDomains.WriteAsyncFailureCounter.toString()),
_writeFailures);
// 2. Test async delete
ZkAsyncCallbacks.DeleteCallbackHandler deleteCallback =
new ZkAsyncCallbacks.DeleteCallbackHandler();
Assert.assertEquals(deleteCallback.getRc(), KeeperException.Code.APIERROR.intValue());
testZkClient.setAsyncCallRC(CONNECTIONLOSS.intValue());
// Async delete will be pending due to the mock error rc is retryable.
testZkClient.asyncDelete(NODE_PATH, deleteCallback);
Assert.assertFalse(deleteCallback.isOperationDone());
Assert.assertEquals(deleteCallback.getRc(), CONNECTIONLOSS.intValue());
// Change the mock return code.
testZkClient.setAsyncCallRC(KeeperException.Code.OK.intValue());
// Async retry will succeed now. Wait until the operation is successfully done and verify.
Assert.assertTrue(waitAsyncOperation(deleteCallback, RETRY_OPS_WAIT_TIMEOUT_MS));
Assert.assertEquals(deleteCallback.getRc(), KeeperException.Code.OK.intValue());
Assert.assertFalse(testZkClient.exists(NODE_PATH));
Assert.assertTrue(testZkClient.getAndResetRetryCount() >= 1);
// Check failure metric, which should be unchanged because the operation succeeded
Assert.assertEquals((long) _beanServer.getAttribute(_rootName,
ZkClientPathMonitor.PredefinedMetricDomains.WriteAsyncFailureCounter.toString()),
_writeFailures);
} finally {
// Always restore the mock rc and release resources, even on assertion failure.
testZkClient.setAsyncCallRC(KeeperException.Code.OK.intValue());
testZkClient.close();
_zkClient.delete(NODE_PATH);
}
}
// Verifies that a user-supplied handle() override on the callback handlers is
// invoked exactly once, only after the retry completes (not while pending),
// and observes the final rc.
@Test(dependsOnMethods = "testAsyncWriteRetry")
public void testAsyncRetryCustomizedCallback() throws JMException {
// int array to store customized async callback return value. Initial value set to 100, witch
// not used by any ZK return code.
final int[] _returnCode = new int[2];
_returnCode[0] = 100;
_returnCode[1] = 100;
// Define Customized callback
class CustomizedSetCallback extends ZkAsyncCallbacks.SetDataCallbackHandler {
@Override
public void handle() {
// Record the rc seen when the framework invokes the user hook.
_returnCode[0] = getRc();
}
}
MockAsyncZkClient testZkClient = new MockAsyncZkClient(_zkServerAddress);
try {
ZNRecord tmpRecord = new ZNRecord("tmpRecord");
tmpRecord.setSimpleField("foo", "bar");
testZkClient.createPersistent(NODE_PATH, tmpRecord);
// 1. Test async set retry
CustomizedSetCallback setCallback =
new CustomizedSetCallback();
Assert.assertEquals(setCallback.getRc(), KeeperException.Code.APIERROR.intValue());
tmpRecord.setSimpleField("test", "data");
testZkClient.setAsyncCallRC(CONNECTIONLOSS.intValue());
// Async set will be pending due to the mock error rc is retryable.
testZkClient.asyncSetData(NODE_PATH, tmpRecord, -1, setCallback);
Assert.assertFalse(setCallback.isOperationDone());
Assert.assertEquals(setCallback.getRc(), CONNECTIONLOSS.intValue());
// handle() haven't been called until retry finished or canceled, assert it is default value.
Assert.assertEquals(_returnCode[0], 100);
// Change the mock return code.
testZkClient.setAsyncCallRC(KeeperException.Code.OK.intValue());
// Async retry will succeed now. Wait until the operation is successfully done and verify.
Assert.assertTrue(waitAsyncOperation(setCallback, RETRY_OPS_WAIT_TIMEOUT_MS));
Assert.assertEquals(setCallback.getRc(), KeeperException.Code.OK.intValue());
// handle() called when retry finished, check return value.
Assert.assertEquals(_returnCode[0], KeeperException.Code.OK.intValue());
Assert.assertTrue(testZkClient.getAndResetRetryCount() >= 1);
// 2. Test async delete
class CustomizedDeleteCallback extends ZkAsyncCallbacks.DeleteCallbackHandler{
@Override
public void handle() {
// Record the rc seen when the framework invokes the user hook.
_returnCode[1] = getRc();
}
}
CustomizedDeleteCallback deleteCallback =
new CustomizedDeleteCallback();
Assert.assertEquals(deleteCallback.getRc(), KeeperException.Code.APIERROR.intValue());
testZkClient.setAsyncCallRC(CONNECTIONLOSS.intValue());
// Async delete will be pending due to the mock error rc is retryable.
testZkClient.asyncDelete(NODE_PATH, deleteCallback);
Assert.assertFalse(deleteCallback.isOperationDone());
Assert.assertEquals(deleteCallback.getRc(), CONNECTIONLOSS.intValue());
// handle() haven't been called until retry finished or canceled, assert it is default value.
Assert.assertEquals(_returnCode[1], 100);
// Change the mock return code.
testZkClient.setAsyncCallRC(KeeperException.Code.OK.intValue());
// Async retry will succeed now. Wait until the operation is successfully done and verify.
Assert.assertTrue(waitAsyncOperation(deleteCallback, RETRY_OPS_WAIT_TIMEOUT_MS));
Assert.assertEquals(deleteCallback.getRc(), KeeperException.Code.OK.intValue());
Assert.assertFalse(testZkClient.exists(NODE_PATH));
Assert.assertTrue(testZkClient.getAndResetRetryCount() >= 1);
// handle() called when retry finished, check return value.
Assert.assertEquals(_returnCode[1], KeeperException.Code.OK.intValue());
} finally {
// Always restore the mock rc and release resources, even on assertion failure.
testZkClient.setAsyncCallRC(KeeperException.Code.OK.intValue());
testZkClient.close();
_zkClient.delete(NODE_PATH);
}
}
  /*
   * Tests that if an exception is thrown during the retry operation,
   * the pending async context is cancelled correctly: the callback completes
   * with the last error code instead of retrying forever, and the failure
   * metric is not incremented.
   */
  @Test(dependsOnMethods = "testAsyncRetryCustomizedCallback")
  public void testAsyncWriteRetryThrowException() throws JMException {
    MockAsyncZkClient testZkClient = new MockAsyncZkClient(_zkServerAddress);
    try {
      ZNRecord tmpRecord = new ZNRecord("tmpRecord");
      tmpRecord.setSimpleField("foo", "bar");
      testZkClient.createPersistent(NODE_PATH, tmpRecord);
      // 1. Test async create retry
      ZkAsyncCallbacks.CreateCallbackHandler createCallback =
          new ZkAsyncCallbacks.CreateCallbackHandler();
      // A fresh callback handler reports APIERROR until a result is processed.
      Assert.assertEquals(createCallback.getRc(), KeeperException.Code.APIERROR.intValue());
      tmpRecord.setSimpleField("test", "data");
      testZkClient.setAsyncCallRC(CONNECTIONLOSS.intValue());
      // Async create will be pending because the mock error rc is retryable.
      testZkClient.asyncCreate(NODE_PATH, tmpRecord, CreateMode.PERSISTENT, createCallback);
      Assert.assertFalse(createCallback.isOperationDone());
      Assert.assertEquals(createCallback.getRc(), CONNECTIONLOSS.intValue());
      // Make the retry logic throw, which should cancel the pending context.
      testZkClient.setZkExceptionInRetry(true);
      // Wait until the operation completes (via cancellation) and verify.
      Assert.assertTrue(waitAsyncOperation(createCallback, RETRY_OPS_WAIT_TIMEOUT_MS),
          "Async callback should have been canceled");
      // The callback keeps the last error code seen before cancellation.
      Assert.assertEquals(createCallback.getRc(), CONNECTIONLOSS.intValue());
      Assert.assertTrue(testZkClient.getAndResetRetryCount() >= 1);
      // Check failure metric, which should be unchanged: the exception is thrown before
      // the logic responsible for increasing the metric is reached.
      Assert.assertEquals((long) _beanServer.getAttribute(_rootName,
          ZkClientPathMonitor.PredefinedMetricDomains.WriteAsyncFailureCounter.toString()),
          _writeFailures);
      // Restore the state
      testZkClient.setZkExceptionInRetry(false);
      // 2. Test async set retry
      ZkAsyncCallbacks.SetDataCallbackHandler setCallback =
          new ZkAsyncCallbacks.SetDataCallbackHandler();
      Assert.assertEquals(setCallback.getRc(), KeeperException.Code.APIERROR.intValue());
      tmpRecord.setSimpleField("test", "data");
      testZkClient.setAsyncCallRC(CONNECTIONLOSS.intValue());
      // Async set will be pending because the mock error rc is retryable.
      testZkClient.asyncSetData(NODE_PATH, tmpRecord, -1, setCallback);
      Assert.assertFalse(setCallback.isOperationDone());
      Assert.assertEquals(setCallback.getRc(), CONNECTIONLOSS.intValue());
      // Make the retry logic throw, which should cancel the pending context.
      testZkClient.setZkExceptionInRetry(true);
      // Wait until the operation completes (via cancellation) and verify.
      Assert.assertTrue(waitAsyncOperation(setCallback, RETRY_OPS_WAIT_TIMEOUT_MS),
          "Async callback should have been canceled");
      Assert.assertEquals(setCallback.getRc(), CONNECTIONLOSS.intValue());
      Assert.assertTrue(testZkClient.getAndResetRetryCount() >= 1);
      // Check failure metric, which should be unchanged: the exception is thrown before
      // the logic responsible for increasing the metric is reached.
      Assert.assertEquals((long) _beanServer.getAttribute(_rootName,
          ZkClientPathMonitor.PredefinedMetricDomains.WriteAsyncFailureCounter.toString()),
          _writeFailures);
    } finally {
      testZkClient.setAsyncCallRC(KeeperException.Code.OK.intValue());
      testZkClient.close();
      _zkClient.delete(NODE_PATH);
    }
  }
/*
* Test handle() is executed once if callback retry is canceled.
*/
@Test(dependsOnMethods = "testAsyncWriteRetryThrowException")
public void testAsyncRetryCustomizedCallbackCancel() throws JMException {
// int array to store customized async callback return value. Initial value set to 100, witch
// not used by any ZK return code.
final int[] _returnCode = new int[2];
_returnCode[0] = 100;
_returnCode[1] = 100;
// Define Customized callback
class CustomizedCreateCallback extends ZkAsyncCallbacks.CreateCallbackHandler {
@Override
public void handle() {
_returnCode[0] = getRc();
}
}
MockAsyncZkClient testZkClient = new MockAsyncZkClient(_zkServerAddress);
try {
ZNRecord tmpRecord = new ZNRecord("tmpRecord");
tmpRecord.setSimpleField("foo", "bar");
testZkClient.createPersistent(NODE_PATH, tmpRecord);
// 1. Test async create retry
CustomizedCreateCallback createCallback =
new CustomizedCreateCallback();
Assert.assertEquals(createCallback.getRc(), KeeperException.Code.APIERROR.intValue());
tmpRecord.setSimpleField("test", "data");
testZkClient.setAsyncCallRC(CONNECTIONLOSS.intValue());
// Async set will be pending due to the mock error rc is retryable.
testZkClient.asyncCreate(NODE_PATH, tmpRecord, CreateMode.PERSISTENT, createCallback);
Assert.assertFalse(createCallback.isOperationDone());
// Original callback should have return code set to CONNECTIONLOSS
Assert.assertEquals(createCallback.getRc(), CONNECTIONLOSS.intValue());
// handle() haven't been called until retry finished or canceled, assert it is default value.
Assert.assertEquals(_returnCode[0], 100);
// Throw exception in retry
testZkClient.setZkExceptionInRetry(true);
// Async retry will succeed now. Wait until the operation is done and verify.
Assert.assertTrue(waitAsyncOperation(createCallback, RETRY_OPS_WAIT_TIMEOUT_MS),
"Async callback should have been canceled");
Assert.assertEquals(createCallback.getRc(), CONNECTIONLOSS.intValue());
Assert.assertEquals(_returnCode[0], CONNECTIONLOSS.intValue());
Assert.assertTrue(testZkClient.getAndResetRetryCount() >= 1);
// Restore the state
testZkClient.setZkExceptionInRetry(false);
class CustomizedSetCallback extends ZkAsyncCallbacks.SetDataCallbackHandler {
@Override
public void handle() {
_returnCode[1] = getRc();
}
}
// 1. Test async set retry
CustomizedSetCallback setCallback =
new CustomizedSetCallback();
Assert.assertEquals(setCallback.getRc(), KeeperException.Code.APIERROR.intValue());
tmpRecord.setSimpleField("test", "data");
testZkClient.setAsyncCallRC(CONNECTIONLOSS.intValue());
// Async set will be pending due to the mock error rc is retryable.
testZkClient.asyncSetData(NODE_PATH, tmpRecord, -1, setCallback);
Assert.assertFalse(setCallback.isOperationDone());
// Original callback should have return code set to CONNECTIONLOSS
Assert.assertEquals(createCallback.getRc(), CONNECTIONLOSS.intValue());
// handle() haven't been called until retry finished or canceled, assert it is default value.
Assert.assertEquals(_returnCode[1], 100);
// Throw exception in retry
testZkClient.setZkExceptionInRetry(true);
// Async retry will succeed now. Wait until the operation is done and verify.
Assert.assertTrue(waitAsyncOperation(setCallback, RETRY_OPS_WAIT_TIMEOUT_MS),
"Async callback should have been canceled");
Assert.assertEquals(setCallback.getRc(), CONNECTIONLOSS.intValue());
Assert.assertEquals(_returnCode[1], CONNECTIONLOSS.intValue());
Assert.assertTrue(testZkClient.getAndResetRetryCount() >= 1);
} finally {
testZkClient.setAsyncCallRC(KeeperException.Code.OK.intValue());
testZkClient.close();
_zkClient.delete(NODE_PATH);
}
}
  /*
   * Tests async read (exists/getData) retry: a retryable mock error keeps the
   * operation pending, and it completes successfully once the mock return code
   * is switched back to OK. The read-failure metric must remain unchanged
   * because both operations ultimately succeed.
   */
  @Test(dependsOnMethods = "testAsyncRetryCustomizedCallbackCancel")
  public void testAsyncReadRetry() throws JMException {
    MockAsyncZkClient testZkClient = new MockAsyncZkClient(_zkServerAddress);
    try {
      ZNRecord tmpRecord = new ZNRecord("tmpRecord");
      tmpRecord.setSimpleField("foo", "bar");
      testZkClient.createPersistent(NODE_PATH, tmpRecord);
      // 1. Test async exist check
      ZkAsyncCallbacks.ExistsCallbackHandler existsCallback =
          new ZkAsyncCallbacks.ExistsCallbackHandler();
      Assert.assertEquals(existsCallback.getRc(), KeeperException.Code.APIERROR.intValue());
      testZkClient.setAsyncCallRC(CONNECTIONLOSS.intValue());
      // Async exist check will be pending because the mock error rc is retryable.
      testZkClient.asyncExists(NODE_PATH, existsCallback);
      Assert.assertFalse(existsCallback.isOperationDone());
      Assert.assertEquals(existsCallback.getRc(), CONNECTIONLOSS.intValue());
      // Change the mock return code.
      testZkClient.setAsyncCallRC(KeeperException.Code.OK.intValue());
      // Async retry will succeed now. Wait until the operation is successfully done and verify.
      Assert.assertTrue(waitAsyncOperation(existsCallback, RETRY_OPS_WAIT_TIMEOUT_MS));
      Assert.assertEquals(existsCallback.getRc(), KeeperException.Code.OK.intValue());
      // The real ZK call populated the stat object.
      Assert.assertTrue(existsCallback._stat != null);
      Assert.assertTrue(testZkClient.getAndResetRetryCount() >= 1);
      // Check failure metric, which should be unchanged because the operation succeeded.
      Assert.assertEquals((long) _beanServer.getAttribute(_rootName,
          ZkClientPathMonitor.PredefinedMetricDomains.ReadAsyncFailureCounter.toString()),
          _readFailures);
      // 2. Test async get
      ZkAsyncCallbacks.GetDataCallbackHandler getCallback =
          new ZkAsyncCallbacks.GetDataCallbackHandler();
      Assert.assertEquals(getCallback.getRc(), KeeperException.Code.APIERROR.intValue());
      testZkClient.setAsyncCallRC(CONNECTIONLOSS.intValue());
      // Async get will be pending because the mock error rc is retryable.
      testZkClient.asyncGetData(NODE_PATH, getCallback);
      Assert.assertFalse(getCallback.isOperationDone());
      Assert.assertEquals(getCallback.getRc(), CONNECTIONLOSS.intValue());
      // Change the mock return code.
      testZkClient.setAsyncCallRC(KeeperException.Code.OK.intValue());
      // Async retry will succeed now. Wait until the operation is successfully done and verify.
      Assert.assertTrue(waitAsyncOperation(getCallback, RETRY_OPS_WAIT_TIMEOUT_MS));
      Assert.assertEquals(getCallback.getRc(), KeeperException.Code.OK.intValue());
      // The fetched data should round-trip back to the record written above.
      ZNRecord record = testZkClient.deserialize(getCallback._data, NODE_PATH);
      Assert.assertEquals(record.getSimpleField("foo"), "bar");
      Assert.assertTrue(testZkClient.getAndResetRetryCount() >= 1);
      // Check failure metric, which should be unchanged because the operation succeeded.
      Assert.assertEquals((long) _beanServer.getAttribute(_rootName,
          ZkClientPathMonitor.PredefinedMetricDomains.ReadAsyncFailureCounter.toString()),
          _readFailures);
    } finally {
      testZkClient.setAsyncCallRC(KeeperException.Code.OK.intValue());
      testZkClient.close();
      _zkClient.delete(NODE_PATH);
    }
  }
  /*
   * Tests that all pending async requests are cleaned up (their retries canceled)
   * when the ZkClient is closed, so no callback is left hanging forever.
   */
  @Test(dependsOnMethods = "testAsyncReadRetry")
  public void testAsyncRequestCleanup() throws JMException {
    int cbCount = 10;
    MockAsyncZkClient testZkClient = new MockAsyncZkClient(_zkServerAddress);
    try {
      ZNRecord tmpRecord = new ZNRecord("tmpRecord");
      tmpRecord.setSimpleField("foo", "bar");
      testZkClient.createPersistent(NODE_PATH, tmpRecord);
      // Create 10 async exists check requests
      ZkAsyncCallbacks.ExistsCallbackHandler[] existsCallbacks =
          new ZkAsyncCallbacks.ExistsCallbackHandler[cbCount];
      for (int i = 0; i < cbCount; i++) {
        existsCallbacks[i] = new ZkAsyncCallbacks.ExistsCallbackHandler();
      }
      testZkClient.setAsyncCallRC(CONNECTIONLOSS.intValue());
      // All async exist check calls will be pending because the mock error rc is retryable.
      for (ZkAsyncCallbacks.ExistsCallbackHandler cb : existsCallbacks) {
        testZkClient.asyncExists(NODE_PATH, cb);
        Assert.assertEquals(cb.getRc(), CONNECTIONLOSS.intValue());
      }
      // Wait for a while; no callback finishes while the error rc is still in place.
      Assert.assertFalse(waitAsyncOperation(existsCallbacks[0], RETRY_OPS_WAIT_TIMEOUT_MS));
      for (ZkAsyncCallbacks.ExistsCallbackHandler cb : existsCallbacks) {
        Assert.assertEquals(cb.getRc(), CONNECTIONLOSS.intValue());
        Assert.assertFalse(cb.isOperationDone());
      }
      testZkClient.close();
      // All callback retries will be cancelled because the zkclient is closed.
      for (ZkAsyncCallbacks.ExistsCallbackHandler cb : existsCallbacks) {
        Assert.assertTrue(waitAsyncOperation(cb, RETRY_OPS_WAIT_TIMEOUT_MS));
        Assert.assertEquals(cb.getRc(), CONNECTIONLOSS.intValue());
        // The failure metric doesn't increase here, because an exception is thrown before the logic
        // responsible for increasing the metric is reached.
        Assert.assertEquals((long) _beanServer.getAttribute(_rootName,
            ZkClientPathMonitor.PredefinedMetricDomains.ReadAsyncFailureCounter.toString()),
            _readFailures);
      }
      Assert.assertTrue(testZkClient.getAndResetRetryCount() >= 1);
    } finally {
      testZkClient.setAsyncCallRC(KeeperException.Code.OK.intValue());
      testZkClient.close();
      _zkClient.delete(NODE_PATH);
    }
  }
  /*
   * Tests the async failure-metric paths not covered by the other test methods:
   * APIERROR increments the read/write async failure counters, while NONODE
   * does not count as a failure.
   */
  @Test(dependsOnMethods = "testAsyncRequestCleanup")
  public void testAsyncFailureMetrics() throws JMException {
    // The remaining failure paths that weren't covered in other test methods are tested here
    MockAsyncZkClient testZkClient = new MockAsyncZkClient(_zkServerAddress);
    try {
      ZNRecord tmpRecord = new ZNRecord("tmpRecord");
      tmpRecord.setSimpleField("foo", "bar");
      testZkClient.createPersistent(NODE_PATH, tmpRecord);
      // Test asyncGet failure
      ZkAsyncCallbacks.GetDataCallbackHandler getCallback =
          new ZkAsyncCallbacks.GetDataCallbackHandler();
      Assert.assertEquals(getCallback.getRc(), KeeperException.Code.APIERROR.intValue());
      // asyncGet should fail because the return code is APIERROR
      testZkClient.setAsyncCallRC(KeeperException.Code.APIERROR.intValue());
      testZkClient.asyncGetData(NODE_PATH, getCallback);
      getCallback.waitForSuccess();
      Assert.assertEquals(getCallback.getRc(), KeeperException.Code.APIERROR.intValue());
      // Expect the read failure counter to have been bumped by exactly one.
      ++_readFailures;
      Assert.assertEquals((long) _beanServer.getAttribute(_rootName,
          ZkClientPathMonitor.PredefinedMetricDomains.ReadAsyncFailureCounter.toString()),
          _readFailures);
      // asyncGet should not count as a failure when the return code is NONODE
      testZkClient.setAsyncCallRC(KeeperException.Code.NONODE.intValue());
      testZkClient.asyncGetData(NODE_PATH, getCallback);
      getCallback.waitForSuccess();
      Assert.assertEquals(getCallback.getRc(), KeeperException.Code.NONODE.intValue());
      Assert.assertEquals((long) _beanServer.getAttribute(_rootName,
          ZkClientPathMonitor.PredefinedMetricDomains.ReadAsyncFailureCounter.toString()),
          _readFailures);
      // Test asyncExists failure
      ZkAsyncCallbacks.ExistsCallbackHandler existsCallback =
          new ZkAsyncCallbacks.ExistsCallbackHandler();
      Assert.assertEquals(existsCallback.getRc(), KeeperException.Code.APIERROR.intValue());
      // asyncExists should fail because the return code is APIERROR
      testZkClient.setAsyncCallRC(KeeperException.Code.APIERROR.intValue());
      testZkClient.asyncExists(NODE_PATH, existsCallback);
      existsCallback.waitForSuccess();
      Assert.assertEquals(existsCallback.getRc(), KeeperException.Code.APIERROR.intValue());
      ++_readFailures;
      Assert.assertEquals((long) _beanServer.getAttribute(_rootName,
          ZkClientPathMonitor.PredefinedMetricDomains.ReadAsyncFailureCounter.toString()),
          _readFailures);
      // asyncExists should not count as a failure when the return code is NONODE
      // (the metric below is asserted unchanged, matching the asyncGet NONODE case).
      testZkClient.setAsyncCallRC(KeeperException.Code.NONODE.intValue());
      testZkClient.asyncExists(NODE_PATH, existsCallback);
      existsCallback.waitForSuccess();
      Assert.assertEquals(existsCallback.getRc(), KeeperException.Code.NONODE.intValue());
      Assert.assertEquals((long) _beanServer.getAttribute(_rootName,
          ZkClientPathMonitor.PredefinedMetricDomains.ReadAsyncFailureCounter.toString()),
          _readFailures);
      // Test asyncSet failure
      ZkAsyncCallbacks.SetDataCallbackHandler setCallback =
          new ZkAsyncCallbacks.SetDataCallbackHandler();
      Assert.assertEquals(setCallback.getRc(), KeeperException.Code.APIERROR.intValue());
      // asyncSet should fail because the return code is APIERROR
      testZkClient.setAsyncCallRC(KeeperException.Code.APIERROR.intValue());
      testZkClient.asyncSetData(NODE_PATH, tmpRecord, -1, setCallback);
      setCallback.waitForSuccess();
      Assert.assertEquals(setCallback.getRc(), KeeperException.Code.APIERROR.intValue());
      ++_writeFailures;
      Assert.assertEquals((long) _beanServer.getAttribute(_rootName,
          ZkClientPathMonitor.PredefinedMetricDomains.WriteAsyncFailureCounter.toString()),
          _writeFailures);
      // Test asyncDelete failure
      ZkAsyncCallbacks.DeleteCallbackHandler deleteCallback =
          new ZkAsyncCallbacks.DeleteCallbackHandler();
      Assert.assertEquals(deleteCallback.getRc(), KeeperException.Code.APIERROR.intValue());
      // asyncDelete should fail because the return code is APIERROR
      testZkClient.setAsyncCallRC(KeeperException.Code.APIERROR.intValue());
      testZkClient.asyncDelete(NODE_PATH, deleteCallback);
      deleteCallback.waitForSuccess();
      Assert.assertEquals(deleteCallback.getRc(), KeeperException.Code.APIERROR.intValue());
      ++_writeFailures;
      Assert.assertEquals((long) _beanServer.getAttribute(_rootName,
          ZkClientPathMonitor.PredefinedMetricDomains.WriteAsyncFailureCounter.toString()),
          _writeFailures);
    } finally {
      testZkClient.setAsyncCallRC(KeeperException.Code.OK.intValue());
      testZkClient.close();
      _zkClient.delete(NODE_PATH);
    }
  }
  /**
   * Mock client to whitebox test async functionality.
   * When the configured return code is OK, calls pass through to the real ZkClient.
   * A retryable mock rc delivers the callback with a retry context (so retry/cancel
   * paths can be exercised); any other rc is delivered with a plain monitor context
   * without touching ZooKeeper.
   */
  class MockAsyncZkClient extends ZkClient {
    // Delay before each mocked retry, keeping operations pending long enough to observe.
    private static final long RETRY_INTERVAL_MS = 500;
    // Number of mocked retries performed since the last getAndResetRetryCount() call.
    private long _retryCount = 0;
    /**
     * If the specified return code is OK, call the real function.
     * Otherwise, trigger the callback with the specified RC without triggering the real ZK call.
     */
    private int _asyncCallRetCode = KeeperException.Code.OK.intValue();
    // When true, preProcess() throws ZkException to simulate a failure inside the retry path.
    private boolean _zkExceptionInRetry = false;
    public MockAsyncZkClient(String zkAddress) {
      super(zkAddress);
      setZkSerializer(new ZNRecordSerializer());
    }
    // Sets the return code injected into subsequent async calls.
    public void setAsyncCallRC(int rc) {
      _asyncCallRetCode = rc;
    }
    // Returns the retry count accumulated so far and resets it to zero.
    public long getAndResetRetryCount() {
      long tmpCount = _retryCount;
      _retryCount = 0;
      return tmpCount;
    }
    public void setZkExceptionInRetry(boolean zkExceptionInRetry) {
      _zkExceptionInRetry = zkExceptionInRetry;
    }
    /**
     * Mocked create: OK delegates to the real client; a retryable rc hands the callback a
     * retry context whose doRetry() re-enters this method (after preProcess()); any other rc
     * is delivered immediately with a monitor-only context.
     */
    @Override
    public void asyncCreate(String path, Object datat, CreateMode mode,
        ZkAsyncCallbacks.CreateCallbackHandler cb) {
      if (_asyncCallRetCode == KeeperException.Code.OK.intValue()) {
        super.asyncCreate(path, datat, mode, cb);
        return;
      } else if (needRetry(_asyncCallRetCode)) {
        cb.processResult(_asyncCallRetCode, path,
            new ZkAsyncRetryCallContext(_asyncCallRetryThread, cb, null, 0, 0, false) {
              @Override
              protected void doRetry() {
                preProcess();
                asyncCreate(path, datat, mode, cb);
              }
            }, null);
      } else {
        cb.processResult(_asyncCallRetCode, path,
            new ZkAsyncCallMonitorContext(_monitor, 0, 0, false), null);
      }
    }
    // Same mock pattern as asyncCreate, for setData (write operation).
    @Override
    public void asyncSetData(String path, Object datat, int version,
        ZkAsyncCallbacks.SetDataCallbackHandler cb) {
      if (_asyncCallRetCode == KeeperException.Code.OK.intValue()) {
        super.asyncSetData(path, datat, version, cb);
        return;
      } else if (needRetry(_asyncCallRetCode)) {
        cb.processResult(_asyncCallRetCode, path,
            new ZkAsyncRetryCallContext(_asyncCallRetryThread, cb, null, 0, 0, false) {
              @Override
              protected void doRetry() {
                preProcess();
                asyncSetData(path, datat, version, cb);
              }
            }, null);
      } else {
        cb.processResult(_asyncCallRetCode, path,
            new ZkAsyncCallMonitorContext(_monitor, 0, 0, false), null);
      }
    }
    // Same mock pattern, for getData (read operation; note the `true` read flag and the
    // extra null data/stat arguments required by the getData callback signature).
    @Override
    public void asyncGetData(String path, ZkAsyncCallbacks.GetDataCallbackHandler cb) {
      if (_asyncCallRetCode == KeeperException.Code.OK.intValue()) {
        super.asyncGetData(path, cb);
        return;
      } else if (needRetry(_asyncCallRetCode)) {
        cb.processResult(_asyncCallRetCode, path,
            new ZkAsyncRetryCallContext(_asyncCallRetryThread, cb, null, 0, 0, true) {
              @Override
              protected void doRetry() {
                preProcess();
                asyncGetData(path, cb);
              }
            }, null, null);
      } else {
        cb.processResult(_asyncCallRetCode, path,
            new ZkAsyncCallMonitorContext(_monitor, 0, 0, true), null, null);
      }
    }
    // Same mock pattern, for exists (read operation).
    @Override
    public void asyncExists(String path, ZkAsyncCallbacks.ExistsCallbackHandler cb) {
      if (_asyncCallRetCode == KeeperException.Code.OK.intValue()) {
        super.asyncExists(path, cb);
        return;
      } else if (needRetry(_asyncCallRetCode)) {
        cb.processResult(_asyncCallRetCode, path,
            new ZkAsyncRetryCallContext(_asyncCallRetryThread, cb, null, 0, 0, true) {
              @Override
              protected void doRetry() {
                preProcess();
                asyncExists(path, cb);
              }
            }, null);
      } else {
        cb.processResult(_asyncCallRetCode, path,
            new ZkAsyncCallMonitorContext(_monitor, 0, 0, true), null);
      }
    }
    // Same mock pattern, for delete (write operation; delete callbacks take no result payload).
    @Override
    public void asyncDelete(String path, ZkAsyncCallbacks.DeleteCallbackHandler cb) {
      if (_asyncCallRetCode == KeeperException.Code.OK.intValue()) {
        super.asyncDelete(path, cb);
        return;
      } else if (needRetry(_asyncCallRetCode)) {
        cb.processResult(_asyncCallRetCode, path,
            new ZkAsyncRetryCallContext(_asyncCallRetryThread, cb, null, 0, 0, false) {
              @Override
              protected void doRetry() {
                preProcess();
                asyncDelete(path, cb);
              }
            });
      } else {
        cb.processResult(_asyncCallRetCode, path,
            new ZkAsyncCallMonitorContext(_monitor, 0, 0, false));
      }
    }
    /**
     * Common hook run before each mocked retry: counts the retry, optionally throws a
     * ZkException (to test cancellation), then sleeps RETRY_INTERVAL_MS so the pending
     * state is observable by the tests.
     */
    private void preProcess() {
      _retryCount++;
      if (_zkExceptionInRetry) {
        throw new ZkException();
      }
      try {
        Thread.sleep(RETRY_INTERVAL_MS);
      } catch (InterruptedException e) {
        throw new ZkInterruptedException(e);
      }
    }
  }
}
|
googleapis/google-cloud-java | 37,606 | java-aiplatform/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/UserActionReference.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/aiplatform/v1beta1/user_action_reference.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.aiplatform.v1beta1;
/**
*
*
* <pre>
* References an API call. It contains more information about long running
* operation and Jobs that are triggered by the API call.
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1beta1.UserActionReference}
*/
public final class UserActionReference extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1beta1.UserActionReference)
UserActionReferenceOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use UserActionReference.newBuilder() to construct.
  private UserActionReference(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // Default-instance constructor; initializes scalar fields to their proto3 defaults.
  private UserActionReference() {
    method_ = "";
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new UserActionReference();
  }

  // Descriptor/accessor-table plumbing generated from the .proto file.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.aiplatform.v1beta1.UserActionReferenceProto
        .internal_static_google_cloud_aiplatform_v1beta1_UserActionReference_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.aiplatform.v1beta1.UserActionReferenceProto
        .internal_static_google_cloud_aiplatform_v1beta1_UserActionReference_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.aiplatform.v1beta1.UserActionReference.class,
            com.google.cloud.aiplatform.v1beta1.UserActionReference.Builder.class);
  }
  // Discriminator for the `reference` oneof; holds the active field number (0 = not set).
  private int referenceCase_ = 0;

  // Storage for whichever `reference` oneof field is set (a String or a ByteString).
  @SuppressWarnings("serial")
  private java.lang.Object reference_;

  /** Enumerates which field of the {@code reference} oneof is set. */
  public enum ReferenceCase
      implements
          com.google.protobuf.Internal.EnumLite,
          com.google.protobuf.AbstractMessage.InternalOneOfEnum {
    OPERATION(1),
    DATA_LABELING_JOB(2),
    REFERENCE_NOT_SET(0);
    private final int value;

    private ReferenceCase(int value) {
      this.value = value;
    }

    /**
     * @param value The number of the enum to look for.
     * @return The enum associated with the given number.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static ReferenceCase valueOf(int value) {
      return forNumber(value);
    }

    // Maps a oneof field number to its enum constant; returns null for unknown numbers.
    public static ReferenceCase forNumber(int value) {
      switch (value) {
        case 1:
          return OPERATION;
        case 2:
          return DATA_LABELING_JOB;
        case 0:
          return REFERENCE_NOT_SET;
        default:
          return null;
      }
    }

    public int getNumber() {
      return this.value;
    }
  };

  /** Returns which field of the {@code reference} oneof is currently set. */
  public ReferenceCase getReferenceCase() {
    return ReferenceCase.forNumber(referenceCase_);
  }
  // Field number of the `operation` branch of the `reference` oneof.
  public static final int OPERATION_FIELD_NUMBER = 1;

  /**
   *
   *
   * <pre>
   * For API calls that return a long running operation.
   * Resource name of the long running operation.
   * Format:
   * `projects/{project}/locations/{location}/operations/{operation}`
   * </pre>
   *
   * <code>string operation = 1;</code>
   *
   * @return Whether the operation field is set.
   */
  public boolean hasOperation() {
    return referenceCase_ == 1;
  }

  /**
   *
   *
   * <pre>
   * For API calls that return a long running operation.
   * Resource name of the long running operation.
   * Format:
   * `projects/{project}/locations/{location}/operations/{operation}`
   * </pre>
   *
   * <code>string operation = 1;</code>
   *
   * @return The operation.
   */
  public java.lang.String getOperation() {
    java.lang.Object ref = "";
    if (referenceCase_ == 1) {
      ref = reference_;
    }
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Stored as a ByteString after parsing: decode once and cache the String back into
      // the oneof slot (only if this branch is still the active one).
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      if (referenceCase_ == 1) {
        reference_ = s;
      }
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * For API calls that return a long running operation.
   * Resource name of the long running operation.
   * Format:
   * `projects/{project}/locations/{location}/operations/{operation}`
   * </pre>
   *
   * <code>string operation = 1;</code>
   *
   * @return The bytes for operation.
   */
  public com.google.protobuf.ByteString getOperationBytes() {
    java.lang.Object ref = "";
    if (referenceCase_ == 1) {
      ref = reference_;
    }
    if (ref instanceof java.lang.String) {
      // Encode the cached String as UTF-8 and cache the ByteString form back.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      if (referenceCase_ == 1) {
        reference_ = b;
      }
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // Field number of the `data_labeling_job` branch of the `reference` oneof.
  public static final int DATA_LABELING_JOB_FIELD_NUMBER = 2;

  /**
   *
   *
   * <pre>
   * For API calls that start a LabelingJob.
   * Resource name of the LabelingJob.
   * Format:
   * `projects/{project}/locations/{location}/dataLabelingJobs/{data_labeling_job}`
   * </pre>
   *
   * <code>string data_labeling_job = 2;</code>
   *
   * @return Whether the dataLabelingJob field is set.
   */
  public boolean hasDataLabelingJob() {
    return referenceCase_ == 2;
  }

  /**
   *
   *
   * <pre>
   * For API calls that start a LabelingJob.
   * Resource name of the LabelingJob.
   * Format:
   * `projects/{project}/locations/{location}/dataLabelingJobs/{data_labeling_job}`
   * </pre>
   *
   * <code>string data_labeling_job = 2;</code>
   *
   * @return The dataLabelingJob.
   */
  public java.lang.String getDataLabelingJob() {
    java.lang.Object ref = "";
    if (referenceCase_ == 2) {
      ref = reference_;
    }
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Stored as a ByteString after parsing: decode once and cache the String back into
      // the oneof slot (only if this branch is still the active one).
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      if (referenceCase_ == 2) {
        reference_ = s;
      }
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * For API calls that start a LabelingJob.
   * Resource name of the LabelingJob.
   * Format:
   * `projects/{project}/locations/{location}/dataLabelingJobs/{data_labeling_job}`
   * </pre>
   *
   * <code>string data_labeling_job = 2;</code>
   *
   * @return The bytes for dataLabelingJob.
   */
  public com.google.protobuf.ByteString getDataLabelingJobBytes() {
    java.lang.Object ref = "";
    if (referenceCase_ == 2) {
      ref = reference_;
    }
    if (ref instanceof java.lang.String) {
      // Encode the cached String as UTF-8 and cache the ByteString form back.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      if (referenceCase_ == 2) {
        reference_ = b;
      }
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int METHOD_FIELD_NUMBER = 3;

  // Singular string field; may hold a String or a ByteString, decoded lazily below.
  @SuppressWarnings("serial")
  private volatile java.lang.Object method_ = "";

  /**
   *
   *
   * <pre>
   * The method name of the API RPC call. For example,
   * "/google.cloud.aiplatform.{apiVersion}.DatasetService.CreateDataset"
   * </pre>
   *
   * <code>string method = 3;</code>
   *
   * @return The method.
   */
  @java.lang.Override
  public java.lang.String getMethod() {
    java.lang.Object ref = method_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Decode the stored ByteString once and cache the String form.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      method_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * The method name of the API RPC call. For example,
   * "/google.cloud.aiplatform.{apiVersion}.DatasetService.CreateDataset"
   * </pre>
   *
   * <code>string method = 3;</code>
   *
   * @return The bytes for method.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getMethodBytes() {
    java.lang.Object ref = method_;
    if (ref instanceof java.lang.String) {
      // Encode the cached String as UTF-8 and cache the ByteString form.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      method_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // Cached tri-state initialization flag: -1 unknown, 0 false, 1 true.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // No required fields in this message, so it is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }

  // Serializes the active `reference` oneof branch (field 1 or 2), then `method`
  // (field 3) if non-empty, then any unknown fields.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (referenceCase_ == 1) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, reference_);
    }
    if (referenceCase_ == 2) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, reference_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(method_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, method_);
    }
    getUnknownFields().writeTo(output);
  }

  // Computes the wire size once and memoizes it in memoizedSize (-1 = not computed yet).
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (referenceCase_ == 1) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, reference_);
    }
    if (referenceCase_ == 2) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, reference_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(method_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, method_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Value equality over `method`, the active oneof case, and its value.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.aiplatform.v1beta1.UserActionReference)) {
      return super.equals(obj);
    }
    com.google.cloud.aiplatform.v1beta1.UserActionReference other =
        (com.google.cloud.aiplatform.v1beta1.UserActionReference) obj;
    if (!getMethod().equals(other.getMethod())) return false;
    // Compare the oneof case first, then the value of the active branch.
    if (!getReferenceCase().equals(other.getReferenceCase())) return false;
    switch (referenceCase_) {
      case 1:
        if (!getOperation().equals(other.getOperation())) return false;
        break;
      case 2:
        if (!getDataLabelingJob().equals(other.getDataLabelingJob())) return false;
        break;
      case 0:
      default:
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  // Hash consistent with equals(); memoized in memoizedHashCode (0 = not computed yet).
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + METHOD_FIELD_NUMBER;
    hash = (53 * hash) + getMethod().hashCode();
    // Mix in only the active oneof branch, tagged by its field number.
    switch (referenceCase_) {
      case 1:
        hash = (37 * hash) + OPERATION_FIELD_NUMBER;
        hash = (53 * hash) + getOperation().hashCode();
        break;
      case 2:
        hash = (37 * hash) + DATA_LABELING_JOB_FIELD_NUMBER;
        hash = (53 * hash) + getDataLabelingJob().hashCode();
        break;
      case 0:
      default:
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
public static com.google.cloud.aiplatform.v1beta1.UserActionReference parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1beta1.UserActionReference parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1beta1.UserActionReference parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1beta1.UserActionReference parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1beta1.UserActionReference parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1beta1.UserActionReference parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
  // Parses one message from the stream; IOExceptions from the stream propagate unwrapped.
  public static com.google.cloud.aiplatform.v1beta1.UserActionReference parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  // Stream variant that resolves extensions against the given registry.
  public static com.google.cloud.aiplatform.v1beta1.UserActionReference parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Parses one length-prefixed (varint size + payload) message from the stream.
  public static com.google.cloud.aiplatform.v1beta1.UserActionReference parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  // Length-prefixed variant that resolves extensions against the given registry.
  public static com.google.cloud.aiplatform.v1beta1.UserActionReference parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Parses directly from a CodedInputStream (lowest-level wire reader).
  public static com.google.cloud.aiplatform.v1beta1.UserActionReference parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  // CodedInputStream variant that resolves extensions against the given registry.
  public static com.google.cloud.aiplatform.v1beta1.UserActionReference parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Returns a fresh Builder for this message type (instance method required by Message).
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  // Creates a new Builder with all fields at their defaults.
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  // Creates a Builder pre-populated with the given message's field values.
  public static Builder newBuilder(
      com.google.cloud.aiplatform.v1beta1.UserActionReference prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  // Converts this message to a Builder; skips the mergeFrom copy for the default instance.
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  // Creates a Builder attached to a parent so nested-builder changes propagate upward.
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * References an API call. It contains more information about long running
   * operation and Jobs that are triggered by the API call.
   * </pre>
   *
   * Protobuf type {@code google.cloud.aiplatform.v1beta1.UserActionReference}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1beta1.UserActionReference)
      com.google.cloud.aiplatform.v1beta1.UserActionReferenceOrBuilder {
    // Returns the descriptor for the UserActionReference message type.
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.aiplatform.v1beta1.UserActionReferenceProto
          .internal_static_google_cloud_aiplatform_v1beta1_UserActionReference_descriptor;
    }

    // Wires the descriptor to the generated message/builder classes for reflection access.
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.aiplatform.v1beta1.UserActionReferenceProto
          .internal_static_google_cloud_aiplatform_v1beta1_UserActionReference_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.aiplatform.v1beta1.UserActionReference.class,
              com.google.cloud.aiplatform.v1beta1.UserActionReference.Builder.class);
    }

    // Construct using com.google.cloud.aiplatform.v1beta1.UserActionReference.newBuilder()
    private Builder() {}

    // Parent-attached constructor used when this builder backs a nested field.
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    // Resets every field, the has-bits, and the 'reference' oneof to their defaults.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      method_ = "";
      referenceCase_ = 0;
      reference_ = null;
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.aiplatform.v1beta1.UserActionReferenceProto
          .internal_static_google_cloud_aiplatform_v1beta1_UserActionReference_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.aiplatform.v1beta1.UserActionReference getDefaultInstanceForType() {
      return com.google.cloud.aiplatform.v1beta1.UserActionReference.getDefaultInstance();
    }

    // Builds the message, throwing if required fields are unset (none here, so never throws).
    @java.lang.Override
    public com.google.cloud.aiplatform.v1beta1.UserActionReference build() {
      com.google.cloud.aiplatform.v1beta1.UserActionReference result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    // Builds without the initialization check; copies set fields plus the oneof state.
    @java.lang.Override
    public com.google.cloud.aiplatform.v1beta1.UserActionReference buildPartial() {
      com.google.cloud.aiplatform.v1beta1.UserActionReference result =
          new com.google.cloud.aiplatform.v1beta1.UserActionReference(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      buildPartialOneofs(result);
      onBuilt();
      return result;
    }

    // Copies non-oneof fields whose has-bit is set (bit 0x4 = 'method').
    private void buildPartial0(com.google.cloud.aiplatform.v1beta1.UserActionReference result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.method_ = method_;
      }
    }

    // Copies the 'reference' oneof case number and its payload into the result.
    private void buildPartialOneofs(
        com.google.cloud.aiplatform.v1beta1.UserActionReference result) {
      result.referenceCase_ = referenceCase_;
      result.reference_ = this.reference_;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    // Dynamic-dispatch merge: use the typed overload when possible, else reflective merge.
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.aiplatform.v1beta1.UserActionReference) {
        return mergeFrom((com.google.cloud.aiplatform.v1beta1.UserActionReference) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Typed merge: non-empty 'method' wins; a set oneof case in 'other' overwrites ours.
    public Builder mergeFrom(com.google.cloud.aiplatform.v1beta1.UserActionReference other) {
      if (other == com.google.cloud.aiplatform.v1beta1.UserActionReference.getDefaultInstance())
        return this;
      if (!other.getMethod().isEmpty()) {
        method_ = other.method_;
        bitField0_ |= 0x00000004;
        onChanged();
      }
      switch (other.getReferenceCase()) {
        case OPERATION:
          {
            referenceCase_ = 1;
            reference_ = other.reference_;
            onChanged();
            break;
          }
        case DATA_LABELING_JOB:
          {
            referenceCase_ = 2;
            reference_ = other.reference_;
            onChanged();
            break;
          }
        case REFERENCE_NOT_SET:
          {
            break;
          }
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    // Wire-format merge: dispatches on field tags (10/18 = oneof strings, 26 = method);
    // unknown fields are preserved; tag 0 or an end-group tag terminates the loop.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                java.lang.String s = input.readStringRequireUtf8();
                referenceCase_ = 1;
                reference_ = s;
                break;
              } // case 10
            case 18:
              {
                java.lang.String s = input.readStringRequireUtf8();
                referenceCase_ = 2;
                reference_ = s;
                break;
              } // case 18
            case 26:
              {
                method_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    // Discriminator and shared payload slot for the 'reference' oneof (operation | data_labeling_job).
    private int referenceCase_ = 0;
    private java.lang.Object reference_;

    public ReferenceCase getReferenceCase() {
      return ReferenceCase.forNumber(referenceCase_);
    }

    // Clears whichever 'reference' oneof member is currently set.
    public Builder clearReference() {
      referenceCase_ = 0;
      reference_ = null;
      onChanged();
      return this;
    }

    private int bitField0_;

    /**
     *
     *
     * <pre>
     * For API calls that return a long running operation.
     * Resource name of the long running operation.
     * Format:
     * `projects/{project}/locations/{location}/operations/{operation}`
     * </pre>
     *
     * <code>string operation = 1;</code>
     *
     * @return Whether the operation field is set.
     */
    @java.lang.Override
    public boolean hasOperation() {
      return referenceCase_ == 1;
    }

    /**
     *
     *
     * <pre>
     * For API calls that return a long running operation.
     * Resource name of the long running operation.
     * Format:
     * `projects/{project}/locations/{location}/operations/{operation}`
     * </pre>
     *
     * <code>string operation = 1;</code>
     *
     * @return The operation.
     */
    @java.lang.Override
    public java.lang.String getOperation() {
      java.lang.Object ref = "";
      if (referenceCase_ == 1) {
        ref = reference_;
      }
      if (!(ref instanceof java.lang.String)) {
        // Lazily decode the ByteString and cache the String only if the case is still ours.
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (referenceCase_ == 1) {
          reference_ = s;
        }
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * For API calls that return a long running operation.
     * Resource name of the long running operation.
     * Format:
     * `projects/{project}/locations/{location}/operations/{operation}`
     * </pre>
     *
     * <code>string operation = 1;</code>
     *
     * @return The bytes for operation.
     */
    @java.lang.Override
    public com.google.protobuf.ByteString getOperationBytes() {
      java.lang.Object ref = "";
      if (referenceCase_ == 1) {
        ref = reference_;
      }
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        if (referenceCase_ == 1) {
          reference_ = b;
        }
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * For API calls that return a long running operation.
     * Resource name of the long running operation.
     * Format:
     * `projects/{project}/locations/{location}/operations/{operation}`
     * </pre>
     *
     * <code>string operation = 1;</code>
     *
     * @param value The operation to set.
     * @return This builder for chaining.
     */
    public Builder setOperation(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      referenceCase_ = 1;
      reference_ = value;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * For API calls that return a long running operation.
     * Resource name of the long running operation.
     * Format:
     * `projects/{project}/locations/{location}/operations/{operation}`
     * </pre>
     *
     * <code>string operation = 1;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearOperation() {
      // Only clears the oneof if 'operation' is the member currently set.
      if (referenceCase_ == 1) {
        referenceCase_ = 0;
        reference_ = null;
        onChanged();
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * For API calls that return a long running operation.
     * Resource name of the long running operation.
     * Format:
     * `projects/{project}/locations/{location}/operations/{operation}`
     * </pre>
     *
     * <code>string operation = 1;</code>
     *
     * @param value The bytes for operation to set.
     * @return This builder for chaining.
     */
    public Builder setOperationBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      referenceCase_ = 1;
      reference_ = value;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * For API calls that start a LabelingJob.
     * Resource name of the LabelingJob.
     * Format:
     * `projects/{project}/locations/{location}/dataLabelingJobs/{data_labeling_job}`
     * </pre>
     *
     * <code>string data_labeling_job = 2;</code>
     *
     * @return Whether the dataLabelingJob field is set.
     */
    @java.lang.Override
    public boolean hasDataLabelingJob() {
      return referenceCase_ == 2;
    }

    /**
     *
     *
     * <pre>
     * For API calls that start a LabelingJob.
     * Resource name of the LabelingJob.
     * Format:
     * `projects/{project}/locations/{location}/dataLabelingJobs/{data_labeling_job}`
     * </pre>
     *
     * <code>string data_labeling_job = 2;</code>
     *
     * @return The dataLabelingJob.
     */
    @java.lang.Override
    public java.lang.String getDataLabelingJob() {
      java.lang.Object ref = "";
      if (referenceCase_ == 2) {
        ref = reference_;
      }
      if (!(ref instanceof java.lang.String)) {
        // Lazily decode the ByteString and cache the String only if the case is still ours.
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (referenceCase_ == 2) {
          reference_ = s;
        }
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * For API calls that start a LabelingJob.
     * Resource name of the LabelingJob.
     * Format:
     * `projects/{project}/locations/{location}/dataLabelingJobs/{data_labeling_job}`
     * </pre>
     *
     * <code>string data_labeling_job = 2;</code>
     *
     * @return The bytes for dataLabelingJob.
     */
    @java.lang.Override
    public com.google.protobuf.ByteString getDataLabelingJobBytes() {
      java.lang.Object ref = "";
      if (referenceCase_ == 2) {
        ref = reference_;
      }
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        if (referenceCase_ == 2) {
          reference_ = b;
        }
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * For API calls that start a LabelingJob.
     * Resource name of the LabelingJob.
     * Format:
     * `projects/{project}/locations/{location}/dataLabelingJobs/{data_labeling_job}`
     * </pre>
     *
     * <code>string data_labeling_job = 2;</code>
     *
     * @param value The dataLabelingJob to set.
     * @return This builder for chaining.
     */
    public Builder setDataLabelingJob(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      referenceCase_ = 2;
      reference_ = value;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * For API calls that start a LabelingJob.
     * Resource name of the LabelingJob.
     * Format:
     * `projects/{project}/locations/{location}/dataLabelingJobs/{data_labeling_job}`
     * </pre>
     *
     * <code>string data_labeling_job = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearDataLabelingJob() {
      // Only clears the oneof if 'data_labeling_job' is the member currently set.
      if (referenceCase_ == 2) {
        referenceCase_ = 0;
        reference_ = null;
        onChanged();
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * For API calls that start a LabelingJob.
     * Resource name of the LabelingJob.
     * Format:
     * `projects/{project}/locations/{location}/dataLabelingJobs/{data_labeling_job}`
     * </pre>
     *
     * <code>string data_labeling_job = 2;</code>
     *
     * @param value The bytes for dataLabelingJob to set.
     * @return This builder for chaining.
     */
    public Builder setDataLabelingJobBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      referenceCase_ = 2;
      reference_ = value;
      onChanged();
      return this;
    }

    // Holds either a String or a lazily-decoded ByteString for field 'method'.
    private java.lang.Object method_ = "";

    /**
     *
     *
     * <pre>
     * The method name of the API RPC call. For example,
     * "/google.cloud.aiplatform.{apiVersion}.DatasetService.CreateDataset"
     * </pre>
     *
     * <code>string method = 3;</code>
     *
     * @return The method.
     */
    public java.lang.String getMethod() {
      java.lang.Object ref = method_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        method_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * The method name of the API RPC call. For example,
     * "/google.cloud.aiplatform.{apiVersion}.DatasetService.CreateDataset"
     * </pre>
     *
     * <code>string method = 3;</code>
     *
     * @return The bytes for method.
     */
    public com.google.protobuf.ByteString getMethodBytes() {
      java.lang.Object ref = method_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        method_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * The method name of the API RPC call. For example,
     * "/google.cloud.aiplatform.{apiVersion}.DatasetService.CreateDataset"
     * </pre>
     *
     * <code>string method = 3;</code>
     *
     * @param value The method to set.
     * @return This builder for chaining.
     */
    public Builder setMethod(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      method_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * The method name of the API RPC call. For example,
     * "/google.cloud.aiplatform.{apiVersion}.DatasetService.CreateDataset"
     * </pre>
     *
     * <code>string method = 3;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearMethod() {
      method_ = getDefaultInstance().getMethod();
      bitField0_ = (bitField0_ & ~0x00000004);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * The method name of the API RPC call. For example,
     * "/google.cloud.aiplatform.{apiVersion}.DatasetService.CreateDataset"
     * </pre>
     *
     * <code>string method = 3;</code>
     *
     * @param value The bytes for method to set.
     * @return This builder for chaining.
     */
    public Builder setMethodBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      method_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1beta1.UserActionReference)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1beta1.UserActionReference)
  // Singleton default instance; shared for all empty UserActionReference messages.
  private static final com.google.cloud.aiplatform.v1beta1.UserActionReference DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1beta1.UserActionReference();
  }
  // Returns the shared immutable default (all-fields-unset) instance.
  public static com.google.cloud.aiplatform.v1beta1.UserActionReference getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Wire-format parser; delegates to Builder.mergeFrom and attaches the partially-parsed
  // message to any InvalidProtocolBufferException so callers can inspect what was read.
  private static final com.google.protobuf.Parser<UserActionReference> PARSER =
      new com.google.protobuf.AbstractParser<UserActionReference>() {
        @java.lang.Override
        public UserActionReference parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  // Static accessor for the singleton parser.
  public static com.google.protobuf.Parser<UserActionReference> parser() {
    return PARSER;
  }
  // Instance accessor for the singleton parser (required by the Message interface).
  @java.lang.Override
  public com.google.protobuf.Parser<UserActionReference> getParserForType() {
    return PARSER;
  }
  // Instance accessor for the shared default instance (required by the Message interface).
  @java.lang.Override
  public com.google.cloud.aiplatform.v1beta1.UserActionReference getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 37,689 | java-securesourcemanager/proto-google-cloud-securesourcemanager-v1/src/main/java/com/google/cloud/securesourcemanager/v1/CreateHookRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/securesourcemanager/v1/secure_source_manager.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.securesourcemanager.v1;
/**
*
*
* <pre>
* CreateHookRequest is the request for creating a hook.
* </pre>
*
* Protobuf type {@code google.cloud.securesourcemanager.v1.CreateHookRequest}
*/
public final class CreateHookRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.securesourcemanager.v1.CreateHookRequest)
CreateHookRequestOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use CreateHookRequest.newBuilder() to construct.
  private CreateHookRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // No-arg constructor used for the default instance; initializes string fields to "".
  private CreateHookRequest() {
    parent_ = "";
    hookId_ = "";
  }
  // Runtime hook used by the protobuf library to allocate fresh instances reflectively.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new CreateHookRequest();
  }
  // Returns the descriptor for the CreateHookRequest message type.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.securesourcemanager.v1.SecureSourceManagerProto
        .internal_static_google_cloud_securesourcemanager_v1_CreateHookRequest_descriptor;
  }
  // Wires the descriptor to the generated message/builder classes for reflection access.
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.securesourcemanager.v1.SecureSourceManagerProto
        .internal_static_google_cloud_securesourcemanager_v1_CreateHookRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.securesourcemanager.v1.CreateHookRequest.class,
            com.google.cloud.securesourcemanager.v1.CreateHookRequest.Builder.class);
  }
  // Has-bits for optional fields (bit 0x1 tracks 'hook').
  private int bitField0_;

  public static final int PARENT_FIELD_NUMBER = 1;

  // Holds either a String or a lazily-decoded ByteString for field 'parent'.
  @SuppressWarnings("serial")
  private volatile java.lang.Object parent_ = "";
  /**
   *
   *
   * <pre>
   * Required. The repository in which to create the hook. Values are of the
   * form
   * `projects/{project_number}/locations/{location_id}/repositories/{repository_id}`
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The parent.
   */
  @java.lang.Override
  public java.lang.String getParent() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Lazily decode the ByteString form and cache the String for later calls.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      parent_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Required. The repository in which to create the hook. Values are of the
   * form
   * `projects/{project_number}/locations/{location_id}/repositories/{repository_id}`
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The bytes for parent.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getParentBytes() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      // Encode the cached String to UTF-8 bytes and cache the ByteString form.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      parent_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int HOOK_FIELD_NUMBER = 2;

  // Message field 'hook'; null until set, guarded by bitField0_ bit 0x1.
  private com.google.cloud.securesourcemanager.v1.Hook hook_;
  /**
   *
   *
   * <pre>
   * Required. The resource being created.
   * </pre>
   *
   * <code>
   * .google.cloud.securesourcemanager.v1.Hook hook = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return Whether the hook field is set.
   */
  @java.lang.Override
  public boolean hasHook() {
    // Presence is tracked by has-bit 0x1, not by hook_ being non-null.
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   *
   *
   * <pre>
   * Required. The resource being created.
   * </pre>
   *
   * <code>
   * .google.cloud.securesourcemanager.v1.Hook hook = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return The hook.
   */
  @java.lang.Override
  public com.google.cloud.securesourcemanager.v1.Hook getHook() {
    // Never returns null: substitutes the default instance when unset.
    return hook_ == null
        ? com.google.cloud.securesourcemanager.v1.Hook.getDefaultInstance()
        : hook_;
  }
  /**
   *
   *
   * <pre>
   * Required. The resource being created.
   * </pre>
   *
   * <code>
   * .google.cloud.securesourcemanager.v1.Hook hook = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.securesourcemanager.v1.HookOrBuilder getHookOrBuilder() {
    // On the immutable message this is identical to getHook(); never returns null.
    return hook_ == null
        ? com.google.cloud.securesourcemanager.v1.Hook.getDefaultInstance()
        : hook_;
  }
  public static final int HOOK_ID_FIELD_NUMBER = 3;

  // Holds either a String or a lazily-decoded ByteString for field 'hook_id'.
  @SuppressWarnings("serial")
  private volatile java.lang.Object hookId_ = "";
  /**
   *
   *
   * <pre>
   * Required. The ID to use for the hook, which will become the final component
   * of the hook's resource name. This value restricts to lower-case letters,
   * numbers, and hyphen, with the first character a letter, the last a letter
   * or a number, and a 63 character maximum.
   * </pre>
   *
   * <code>string hook_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The hookId.
   */
  @java.lang.Override
  public java.lang.String getHookId() {
    java.lang.Object ref = hookId_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Lazily decode the ByteString form and cache the String for later calls.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      hookId_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Required. The ID to use for the hook, which will become the final component
   * of the hook's resource name. This value restricts to lower-case letters,
   * numbers, and hyphen, with the first character a letter, the last a letter
   * or a number, and a 63 character maximum.
   * </pre>
   *
   * <code>string hook_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The bytes for hookId.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getHookIdBytes() {
    java.lang.Object ref = hookId_;
    if (ref instanceof java.lang.String) {
      // Encode the cached String to UTF-8 bytes and cache the ByteString form.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      hookId_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // Memoized result of isInitialized(): -1 = unknown, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;

  // Always initialized: proto3 has no required wire-level fields.
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes set fields in field-number order; default-valued strings are skipped.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(2, getHook());
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(hookId_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, hookId_);
    }
    getUnknownFields().writeTo(output);
  }
  // Computes and memoizes the serialized byte size, mirroring writeTo's field checks.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getHook());
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(hookId_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, hookId_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Value equality over all fields (including unknown fields); 'hook' compared only when set.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.securesourcemanager.v1.CreateHookRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.securesourcemanager.v1.CreateHookRequest other =
        (com.google.cloud.securesourcemanager.v1.CreateHookRequest) obj;
    if (!getParent().equals(other.getParent())) return false;
    if (hasHook() != other.hasHook()) return false;
    if (hasHook()) {
      if (!getHook().equals(other.getHook())) return false;
    }
    if (!getHookId().equals(other.getHookId())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Memoized hash consistent with equals(); mixes field numbers with field value hashes.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    if (hasHook()) {
      hash = (37 * hash) + HOOK_FIELD_NUMBER;
      hash = (53 * hash) + getHook().hashCode();
    }
    hash = (37 * hash) + HOOK_ID_FIELD_NUMBER;
    hash = (53 * hash) + getHookId().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Parses a CreateHookRequest from a ByteBuffer containing the full serialized message.
  public static com.google.cloud.securesourcemanager.v1.CreateHookRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  // Parses from a ByteBuffer, resolving extensions against the given registry.
  public static com.google.cloud.securesourcemanager.v1.CreateHookRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  // Parses a CreateHookRequest from an immutable ByteString.
  public static com.google.cloud.securesourcemanager.v1.CreateHookRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  // Parses from a ByteString, resolving extensions against the given registry.
  public static com.google.cloud.securesourcemanager.v1.CreateHookRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  // Parses a CreateHookRequest from a byte array holding the full serialized message.
  public static com.google.cloud.securesourcemanager.v1.CreateHookRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  // Parses from a byte array, resolving extensions against the given registry.
  public static com.google.cloud.securesourcemanager.v1.CreateHookRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  // Parses one message from the stream; IOExceptions from the stream propagate unwrapped.
  public static com.google.cloud.securesourcemanager.v1.CreateHookRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  // Stream variant that resolves extensions against the given registry.
  public static com.google.cloud.securesourcemanager.v1.CreateHookRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Parses one length-prefixed (varint size + payload) message from the stream.
  public static com.google.cloud.securesourcemanager.v1.CreateHookRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  // Length-prefixed variant that resolves extensions against the given registry.
  public static com.google.cloud.securesourcemanager.v1.CreateHookRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Parses directly from a CodedInputStream (lowest-level wire reader).
  public static com.google.cloud.securesourcemanager.v1.CreateHookRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  // CodedInputStream variant that resolves extensions against the given registry.
  public static com.google.cloud.securesourcemanager.v1.CreateHookRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Returns a fresh Builder for this message type (instance method required by Message).
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  // Creates a new Builder with all fields at their defaults.
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
public static Builder newBuilder(
com.google.cloud.securesourcemanager.v1.CreateHookRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
 *
 *
 * <pre>
 * CreateHookRequest is the request for creating a hook.
 * </pre>
 *
 * Protobuf type {@code google.cloud.securesourcemanager.v1.CreateHookRequest}
 */
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
    implements
    // @@protoc_insertion_point(builder_implements:google.cloud.securesourcemanager.v1.CreateHookRequest)
    com.google.cloud.securesourcemanager.v1.CreateHookRequestOrBuilder {
  /** Returns the descriptor for this message type (shared with the outer message class). */
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.securesourcemanager.v1.SecureSourceManagerProto
        .internal_static_google_cloud_securesourcemanager_v1_CreateHookRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.securesourcemanager.v1.SecureSourceManagerProto
        .internal_static_google_cloud_securesourcemanager_v1_CreateHookRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.securesourcemanager.v1.CreateHookRequest.class,
            com.google.cloud.securesourcemanager.v1.CreateHookRequest.Builder.class);
  }

  // Construct using com.google.cloud.securesourcemanager.v1.CreateHookRequest.newBuilder()
  private Builder() {
    maybeForceBuilderInitialization();
  }

  private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    super(parent);
    maybeForceBuilderInitialization();
  }

  // Eagerly creates nested-message field builders when the runtime requests it
  // (alwaysUseFieldBuilders is true only in tests inside the protobuf runtime).
  private void maybeForceBuilderInitialization() {
    if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
      getHookFieldBuilder();
    }
  }

  /** Resets every field to its default and clears all "has" bits. */
  @java.lang.Override
  public Builder clear() {
    super.clear();
    bitField0_ = 0;
    parent_ = "";
    hook_ = null;
    if (hookBuilder_ != null) {
      hookBuilder_.dispose();
      hookBuilder_ = null;
    }
    hookId_ = "";
    return this;
  }

  @java.lang.Override
  public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
    return com.google.cloud.securesourcemanager.v1.SecureSourceManagerProto
        .internal_static_google_cloud_securesourcemanager_v1_CreateHookRequest_descriptor;
  }

  @java.lang.Override
  public com.google.cloud.securesourcemanager.v1.CreateHookRequest getDefaultInstanceForType() {
    return com.google.cloud.securesourcemanager.v1.CreateHookRequest.getDefaultInstance();
  }

  /** Builds the message, throwing if any required field is unset (none here — proto3). */
  @java.lang.Override
  public com.google.cloud.securesourcemanager.v1.CreateHookRequest build() {
    com.google.cloud.securesourcemanager.v1.CreateHookRequest result = buildPartial();
    if (!result.isInitialized()) {
      throw newUninitializedMessageException(result);
    }
    return result;
  }

  @java.lang.Override
  public com.google.cloud.securesourcemanager.v1.CreateHookRequest buildPartial() {
    com.google.cloud.securesourcemanager.v1.CreateHookRequest result =
        new com.google.cloud.securesourcemanager.v1.CreateHookRequest(this);
    if (bitField0_ != 0) {
      buildPartial0(result);
    }
    onBuilt();
    return result;
  }

  // Copies only the fields whose builder-side bits are set; the message-side
  // bitField0_ tracks only message-typed fields (here: hook).
  private void buildPartial0(com.google.cloud.securesourcemanager.v1.CreateHookRequest result) {
    int from_bitField0_ = bitField0_;
    if (((from_bitField0_ & 0x00000001) != 0)) {
      result.parent_ = parent_;
    }
    int to_bitField0_ = 0;
    if (((from_bitField0_ & 0x00000002) != 0)) {
      result.hook_ = hookBuilder_ == null ? hook_ : hookBuilder_.build();
      to_bitField0_ |= 0x00000001;
    }
    if (((from_bitField0_ & 0x00000004) != 0)) {
      result.hookId_ = hookId_;
    }
    result.bitField0_ |= to_bitField0_;
  }

  @java.lang.Override
  public Builder clone() {
    return super.clone();
  }

  @java.lang.Override
  public Builder setField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.setField(field, value);
  }

  @java.lang.Override
  public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
    return super.clearField(field);
  }

  @java.lang.Override
  public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
    return super.clearOneof(oneof);
  }

  @java.lang.Override
  public Builder setRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
    return super.setRepeatedField(field, index, value);
  }

  @java.lang.Override
  public Builder addRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.addRepeatedField(field, value);
  }

  // Typed fast path when merging from the same generated type; otherwise falls
  // back to reflective merging in the superclass.
  @java.lang.Override
  public Builder mergeFrom(com.google.protobuf.Message other) {
    if (other instanceof com.google.cloud.securesourcemanager.v1.CreateHookRequest) {
      return mergeFrom((com.google.cloud.securesourcemanager.v1.CreateHookRequest) other);
    } else {
      super.mergeFrom(other);
      return this;
    }
  }

  // Standard proto3 merge semantics: non-empty scalars overwrite, set
  // sub-messages merge recursively, unknown fields are concatenated.
  public Builder mergeFrom(com.google.cloud.securesourcemanager.v1.CreateHookRequest other) {
    if (other == com.google.cloud.securesourcemanager.v1.CreateHookRequest.getDefaultInstance())
      return this;
    if (!other.getParent().isEmpty()) {
      parent_ = other.parent_;
      bitField0_ |= 0x00000001;
      onChanged();
    }
    if (other.hasHook()) {
      mergeHook(other.getHook());
    }
    if (!other.getHookId().isEmpty()) {
      hookId_ = other.hookId_;
      bitField0_ |= 0x00000004;
      onChanged();
    }
    this.mergeUnknownFields(other.getUnknownFields());
    onChanged();
    return this;
  }

  @java.lang.Override
  public final boolean isInitialized() {
    return true;
  }

  // Wire-format parse loop: tag values are (field_number << 3) | wire_type,
  // so 10/18/26 are fields 1..3 with length-delimited encoding.
  @java.lang.Override
  public Builder mergeFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          case 10:
            {
              parent_ = input.readStringRequireUtf8();
              bitField0_ |= 0x00000001;
              break;
            } // case 10
          case 18:
            {
              input.readMessage(getHookFieldBuilder().getBuilder(), extensionRegistry);
              bitField0_ |= 0x00000002;
              break;
            } // case 18
          case 26:
            {
              hookId_ = input.readStringRequireUtf8();
              bitField0_ |= 0x00000004;
              break;
            } // case 26
          default:
            {
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
        } // switch (tag)
      } // while (!done)
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.unwrapIOException();
    } finally {
      onChanged();
    } // finally
    return this;
  }

  // Bit layout: 0x1 = parent, 0x2 = hook, 0x4 = hook_id.
  private int bitField0_;

  // Holds either a java.lang.String or a ByteString; converted lazily on access.
  private java.lang.Object parent_ = "";

  /**
   *
   *
   * <pre>
   * Required. The repository in which to create the hook. Values are of the
   * form
   * `projects/{project_number}/locations/{location_id}/repositories/{repository_id}`
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The parent.
   */
  public java.lang.String getParent() {
    java.lang.Object ref = parent_;
    if (!(ref instanceof java.lang.String)) {
      // Cache the decoded String so subsequent reads skip UTF-8 decoding.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      parent_ = s;
      return s;
    } else {
      return (java.lang.String) ref;
    }
  }

  /**
   *
   *
   * <pre>
   * Required. The repository in which to create the hook. Values are of the
   * form
   * `projects/{project_number}/locations/{location_id}/repositories/{repository_id}`
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The bytes for parent.
   */
  public com.google.protobuf.ByteString getParentBytes() {
    java.lang.Object ref = parent_;
    if (ref instanceof String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      parent_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  /**
   *
   *
   * <pre>
   * Required. The repository in which to create the hook. Values are of the
   * form
   * `projects/{project_number}/locations/{location_id}/repositories/{repository_id}`
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @param value The parent to set.
   * @return This builder for chaining.
   */
  public Builder setParent(java.lang.String value) {
    if (value == null) {
      throw new NullPointerException();
    }
    parent_ = value;
    bitField0_ |= 0x00000001;
    onChanged();
    return this;
  }

  /**
   *
   *
   * <pre>
   * Required. The repository in which to create the hook. Values are of the
   * form
   * `projects/{project_number}/locations/{location_id}/repositories/{repository_id}`
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return This builder for chaining.
   */
  public Builder clearParent() {
    parent_ = getDefaultInstance().getParent();
    bitField0_ = (bitField0_ & ~0x00000001);
    onChanged();
    return this;
  }

  /**
   *
   *
   * <pre>
   * Required. The repository in which to create the hook. Values are of the
   * form
   * `projects/{project_number}/locations/{location_id}/repositories/{repository_id}`
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @param value The bytes for parent to set.
   * @return This builder for chaining.
   */
  public Builder setParentBytes(com.google.protobuf.ByteString value) {
    if (value == null) {
      throw new NullPointerException();
    }
    checkByteStringIsUtf8(value);
    parent_ = value;
    bitField0_ |= 0x00000001;
    onChanged();
    return this;
  }

  private com.google.cloud.securesourcemanager.v1.Hook hook_;
  // Lazily created; once non-null it owns the field value and hook_ is unused.
  private com.google.protobuf.SingleFieldBuilderV3<
          com.google.cloud.securesourcemanager.v1.Hook,
          com.google.cloud.securesourcemanager.v1.Hook.Builder,
          com.google.cloud.securesourcemanager.v1.HookOrBuilder>
      hookBuilder_;

  /**
   *
   *
   * <pre>
   * Required. The resource being created.
   * </pre>
   *
   * <code>
   * .google.cloud.securesourcemanager.v1.Hook hook = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return Whether the hook field is set.
   */
  public boolean hasHook() {
    return ((bitField0_ & 0x00000002) != 0);
  }

  /**
   *
   *
   * <pre>
   * Required. The resource being created.
   * </pre>
   *
   * <code>
   * .google.cloud.securesourcemanager.v1.Hook hook = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return The hook.
   */
  public com.google.cloud.securesourcemanager.v1.Hook getHook() {
    if (hookBuilder_ == null) {
      return hook_ == null
          ? com.google.cloud.securesourcemanager.v1.Hook.getDefaultInstance()
          : hook_;
    } else {
      return hookBuilder_.getMessage();
    }
  }

  /**
   *
   *
   * <pre>
   * Required. The resource being created.
   * </pre>
   *
   * <code>
   * .google.cloud.securesourcemanager.v1.Hook hook = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  public Builder setHook(com.google.cloud.securesourcemanager.v1.Hook value) {
    if (hookBuilder_ == null) {
      if (value == null) {
        throw new NullPointerException();
      }
      hook_ = value;
    } else {
      hookBuilder_.setMessage(value);
    }
    bitField0_ |= 0x00000002;
    onChanged();
    return this;
  }

  /**
   *
   *
   * <pre>
   * Required. The resource being created.
   * </pre>
   *
   * <code>
   * .google.cloud.securesourcemanager.v1.Hook hook = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  public Builder setHook(com.google.cloud.securesourcemanager.v1.Hook.Builder builderForValue) {
    if (hookBuilder_ == null) {
      hook_ = builderForValue.build();
    } else {
      hookBuilder_.setMessage(builderForValue.build());
    }
    bitField0_ |= 0x00000002;
    onChanged();
    return this;
  }

  /**
   *
   *
   * <pre>
   * Required. The resource being created.
   * </pre>
   *
   * <code>
   * .google.cloud.securesourcemanager.v1.Hook hook = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  public Builder mergeHook(com.google.cloud.securesourcemanager.v1.Hook value) {
    if (hookBuilder_ == null) {
      // Reference comparison against the default instance is intentional:
      // generated code only merges into a previously-set, non-default value.
      if (((bitField0_ & 0x00000002) != 0)
          && hook_ != null
          && hook_ != com.google.cloud.securesourcemanager.v1.Hook.getDefaultInstance()) {
        getHookBuilder().mergeFrom(value);
      } else {
        hook_ = value;
      }
    } else {
      hookBuilder_.mergeFrom(value);
    }
    if (hook_ != null) {
      bitField0_ |= 0x00000002;
      onChanged();
    }
    return this;
  }

  /**
   *
   *
   * <pre>
   * Required. The resource being created.
   * </pre>
   *
   * <code>
   * .google.cloud.securesourcemanager.v1.Hook hook = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  public Builder clearHook() {
    bitField0_ = (bitField0_ & ~0x00000002);
    hook_ = null;
    if (hookBuilder_ != null) {
      hookBuilder_.dispose();
      hookBuilder_ = null;
    }
    onChanged();
    return this;
  }

  /**
   *
   *
   * <pre>
   * Required. The resource being created.
   * </pre>
   *
   * <code>
   * .google.cloud.securesourcemanager.v1.Hook hook = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  public com.google.cloud.securesourcemanager.v1.Hook.Builder getHookBuilder() {
    bitField0_ |= 0x00000002;
    onChanged();
    return getHookFieldBuilder().getBuilder();
  }

  /**
   *
   *
   * <pre>
   * Required. The resource being created.
   * </pre>
   *
   * <code>
   * .google.cloud.securesourcemanager.v1.Hook hook = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  public com.google.cloud.securesourcemanager.v1.HookOrBuilder getHookOrBuilder() {
    if (hookBuilder_ != null) {
      return hookBuilder_.getMessageOrBuilder();
    } else {
      return hook_ == null
          ? com.google.cloud.securesourcemanager.v1.Hook.getDefaultInstance()
          : hook_;
    }
  }

  /**
   *
   *
   * <pre>
   * Required. The resource being created.
   * </pre>
   *
   * <code>
   * .google.cloud.securesourcemanager.v1.Hook hook = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  private com.google.protobuf.SingleFieldBuilderV3<
          com.google.cloud.securesourcemanager.v1.Hook,
          com.google.cloud.securesourcemanager.v1.Hook.Builder,
          com.google.cloud.securesourcemanager.v1.HookOrBuilder>
      getHookFieldBuilder() {
    if (hookBuilder_ == null) {
      // Ownership of the current value transfers into the builder; hook_ is
      // nulled so there is a single source of truth from here on.
      hookBuilder_ =
          new com.google.protobuf.SingleFieldBuilderV3<
              com.google.cloud.securesourcemanager.v1.Hook,
              com.google.cloud.securesourcemanager.v1.Hook.Builder,
              com.google.cloud.securesourcemanager.v1.HookOrBuilder>(
              getHook(), getParentForChildren(), isClean());
      hook_ = null;
    }
    return hookBuilder_;
  }

  // Holds either a java.lang.String or a ByteString; converted lazily on access.
  private java.lang.Object hookId_ = "";

  /**
   *
   *
   * <pre>
   * Required. The ID to use for the hook, which will become the final component
   * of the hook's resource name. This value restricts to lower-case letters,
   * numbers, and hyphen, with the first character a letter, the last a letter
   * or a number, and a 63 character maximum.
   * </pre>
   *
   * <code>string hook_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The hookId.
   */
  public java.lang.String getHookId() {
    java.lang.Object ref = hookId_;
    if (!(ref instanceof java.lang.String)) {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      hookId_ = s;
      return s;
    } else {
      return (java.lang.String) ref;
    }
  }

  /**
   *
   *
   * <pre>
   * Required. The ID to use for the hook, which will become the final component
   * of the hook's resource name. This value restricts to lower-case letters,
   * numbers, and hyphen, with the first character a letter, the last a letter
   * or a number, and a 63 character maximum.
   * </pre>
   *
   * <code>string hook_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The bytes for hookId.
   */
  public com.google.protobuf.ByteString getHookIdBytes() {
    java.lang.Object ref = hookId_;
    if (ref instanceof String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      hookId_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  /**
   *
   *
   * <pre>
   * Required. The ID to use for the hook, which will become the final component
   * of the hook's resource name. This value restricts to lower-case letters,
   * numbers, and hyphen, with the first character a letter, the last a letter
   * or a number, and a 63 character maximum.
   * </pre>
   *
   * <code>string hook_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @param value The hookId to set.
   * @return This builder for chaining.
   */
  public Builder setHookId(java.lang.String value) {
    if (value == null) {
      throw new NullPointerException();
    }
    hookId_ = value;
    bitField0_ |= 0x00000004;
    onChanged();
    return this;
  }

  /**
   *
   *
   * <pre>
   * Required. The ID to use for the hook, which will become the final component
   * of the hook's resource name. This value restricts to lower-case letters,
   * numbers, and hyphen, with the first character a letter, the last a letter
   * or a number, and a 63 character maximum.
   * </pre>
   *
   * <code>string hook_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return This builder for chaining.
   */
  public Builder clearHookId() {
    hookId_ = getDefaultInstance().getHookId();
    bitField0_ = (bitField0_ & ~0x00000004);
    onChanged();
    return this;
  }

  /**
   *
   *
   * <pre>
   * Required. The ID to use for the hook, which will become the final component
   * of the hook's resource name. This value restricts to lower-case letters,
   * numbers, and hyphen, with the first character a letter, the last a letter
   * or a number, and a 63 character maximum.
   * </pre>
   *
   * <code>string hook_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @param value The bytes for hookId to set.
   * @return This builder for chaining.
   */
  public Builder setHookIdBytes(com.google.protobuf.ByteString value) {
    if (value == null) {
      throw new NullPointerException();
    }
    checkByteStringIsUtf8(value);
    hookId_ = value;
    bitField0_ |= 0x00000004;
    onChanged();
    return this;
  }

  @java.lang.Override
  public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.setUnknownFields(unknownFields);
  }

  @java.lang.Override
  public final Builder mergeUnknownFields(
      final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.mergeUnknownFields(unknownFields);
  }

  // @@protoc_insertion_point(builder_scope:google.cloud.securesourcemanager.v1.CreateHookRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.securesourcemanager.v1.CreateHookRequest)
// Singleton default instance; all empty messages of this type alias it.
private static final com.google.cloud.securesourcemanager.v1.CreateHookRequest DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.cloud.securesourcemanager.v1.CreateHookRequest();
}

public static com.google.cloud.securesourcemanager.v1.CreateHookRequest getDefaultInstance() {
  return DEFAULT_INSTANCE;
}

// Shared stateless parser used by every parseFrom overload above.
private static final com.google.protobuf.Parser<CreateHookRequest> PARSER =
    new com.google.protobuf.AbstractParser<CreateHookRequest>() {
      @java.lang.Override
      public CreateHookRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          // Attach the partially-parsed message so callers can inspect it.
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

public static com.google.protobuf.Parser<CreateHookRequest> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<CreateHookRequest> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.cloud.securesourcemanager.v1.CreateHookRequest getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/apps/meet/v2/service.proto
// Protobuf Java Version: 3.25.8
package com.google.apps.meet.v2;
/**
*
*
* <pre>
* Response for ListTranscriptEntries method.
* </pre>
*
* Protobuf type {@code google.apps.meet.v2.ListTranscriptEntriesResponse}
*/
public final class ListTranscriptEntriesResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.apps.meet.v2.ListTranscriptEntriesResponse)
ListTranscriptEntriesResponseOrBuilder {
// --- Construction and descriptor plumbing ---
private static final long serialVersionUID = 0L;

// Use ListTranscriptEntriesResponse.newBuilder() to construct.
private ListTranscriptEntriesResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}

// No-arg constructor builds the canonical empty message (default instance).
private ListTranscriptEntriesResponse() {
  transcriptEntries_ = java.util.Collections.emptyList();
  nextPageToken_ = "";
}

@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  return new ListTranscriptEntriesResponse();
}

/** Returns the descriptor for this message type. */
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.apps.meet.v2.ServiceProto
      .internal_static_google_apps_meet_v2_ListTranscriptEntriesResponse_descriptor;
}

@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.apps.meet.v2.ServiceProto
      .internal_static_google_apps_meet_v2_ListTranscriptEntriesResponse_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.apps.meet.v2.ListTranscriptEntriesResponse.class,
          com.google.apps.meet.v2.ListTranscriptEntriesResponse.Builder.class);
}
// --- Field 1: repeated TranscriptEntry transcript_entries ---
public static final int TRANSCRIPT_ENTRIES_FIELD_NUMBER = 1;

// Immutable after construction (builder hands over an unmodifiable list).
@SuppressWarnings("serial")
private java.util.List<com.google.apps.meet.v2.TranscriptEntry> transcriptEntries_;

/**
 *
 *
 * <pre>
 * List of TranscriptEntries in one page.
 * </pre>
 *
 * <code>repeated .google.apps.meet.v2.TranscriptEntry transcript_entries = 1;</code>
 */
@java.lang.Override
public java.util.List<com.google.apps.meet.v2.TranscriptEntry> getTranscriptEntriesList() {
  return transcriptEntries_;
}

/**
 *
 *
 * <pre>
 * List of TranscriptEntries in one page.
 * </pre>
 *
 * <code>repeated .google.apps.meet.v2.TranscriptEntry transcript_entries = 1;</code>
 */
@java.lang.Override
public java.util.List<? extends com.google.apps.meet.v2.TranscriptEntryOrBuilder>
    getTranscriptEntriesOrBuilderList() {
  return transcriptEntries_;
}

/**
 *
 *
 * <pre>
 * List of TranscriptEntries in one page.
 * </pre>
 *
 * <code>repeated .google.apps.meet.v2.TranscriptEntry transcript_entries = 1;</code>
 */
@java.lang.Override
public int getTranscriptEntriesCount() {
  return transcriptEntries_.size();
}

/**
 *
 *
 * <pre>
 * List of TranscriptEntries in one page.
 * </pre>
 *
 * <code>repeated .google.apps.meet.v2.TranscriptEntry transcript_entries = 1;</code>
 */
@java.lang.Override
public com.google.apps.meet.v2.TranscriptEntry getTranscriptEntries(int index) {
  return transcriptEntries_.get(index);
}

/**
 *
 *
 * <pre>
 * List of TranscriptEntries in one page.
 * </pre>
 *
 * <code>repeated .google.apps.meet.v2.TranscriptEntry transcript_entries = 1;</code>
 */
@java.lang.Override
public com.google.apps.meet.v2.TranscriptEntryOrBuilder getTranscriptEntriesOrBuilder(int index) {
  return transcriptEntries_.get(index);
}
// --- Field 2: string next_page_token ---
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;

// Holds either a java.lang.String or a ByteString; volatile so the lazy
// decode below is a benign race (both writers store equivalent values).
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";

/**
 *
 *
 * <pre>
 * Token to be circulated back for further List call if current List doesn't
 * include all the transcript entries. Unset if all entries are returned.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @return The nextPageToken.
 */
@java.lang.Override
public java.lang.String getNextPageToken() {
  java.lang.Object ref = nextPageToken_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    // Cache the decoded String so subsequent reads skip UTF-8 decoding.
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    nextPageToken_ = s;
    return s;
  }
}

/**
 *
 *
 * <pre>
 * Token to be circulated back for further List call if current List doesn't
 * include all the transcript entries. Unset if all entries are returned.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @return The bytes for nextPageToken.
 */
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
  java.lang.Object ref = nextPageToken_;
  if (ref instanceof java.lang.String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    nextPageToken_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
// --- Serialization, equality, hashing ---
// memoizedIsInitialized: -1 = unknown, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;

@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  memoizedIsInitialized = 1;
  return true;
}

/** Writes the message to the wire: field 1 entries first, then the page token if non-empty. */
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  for (int i = 0; i < transcriptEntries_.size(); i++) {
    output.writeMessage(1, transcriptEntries_.get(i));
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
  }
  getUnknownFields().writeTo(output);
}

/** Computes (and memoizes) the serialized byte size; must mirror writeTo exactly. */
@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;
  size = 0;
  for (int i = 0; i < transcriptEntries_.size(); i++) {
    size +=
        com.google.protobuf.CodedOutputStream.computeMessageSize(1, transcriptEntries_.get(i));
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}

/** Field-by-field value equality, including unknown fields. */
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.apps.meet.v2.ListTranscriptEntriesResponse)) {
    return super.equals(obj);
  }
  com.google.apps.meet.v2.ListTranscriptEntriesResponse other =
      (com.google.apps.meet.v2.ListTranscriptEntriesResponse) obj;
  if (!getTranscriptEntriesList().equals(other.getTranscriptEntriesList())) return false;
  if (!getNextPageToken().equals(other.getNextPageToken())) return false;
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}

/** Memoized hash consistent with equals; mixes field numbers with field values. */
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  if (getTranscriptEntriesCount() > 0) {
    hash = (37 * hash) + TRANSCRIPT_ENTRIES_FIELD_NUMBER;
    hash = (53 * hash) + getTranscriptEntriesList().hashCode();
  }
  hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
  hash = (53 * hash) + getNextPageToken().hashCode();
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
// --- Static parse entry points (all delegate to the shared PARSER) ---

public static com.google.apps.meet.v2.ListTranscriptEntriesResponse parseFrom(
    java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.apps.meet.v2.ListTranscriptEntriesResponse parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.apps.meet.v2.ListTranscriptEntriesResponse parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.apps.meet.v2.ListTranscriptEntriesResponse parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.apps.meet.v2.ListTranscriptEntriesResponse parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.apps.meet.v2.ListTranscriptEntriesResponse parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.apps.meet.v2.ListTranscriptEntriesResponse parseFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.apps.meet.v2.ListTranscriptEntriesResponse parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}

/** Parses a length-prefixed (delimited) message from the stream. */
public static com.google.apps.meet.v2.ListTranscriptEntriesResponse parseDelimitedFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}

public static com.google.apps.meet.v2.ListTranscriptEntriesResponse parseDelimitedFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}

public static com.google.apps.meet.v2.ListTranscriptEntriesResponse parseFrom(
    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.apps.meet.v2.ListTranscriptEntriesResponse parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}

// --- Builder factory methods ---

@java.lang.Override
public Builder newBuilderForType() {
  return newBuilder();
}

/** Returns an empty builder. */
public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}

/** Returns a builder pre-populated with every set field of {@code prototype}. */
public static Builder newBuilder(
    com.google.apps.meet.v2.ListTranscriptEntriesResponse prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}

@java.lang.Override
public Builder toBuilder() {
  // Skip the mergeFrom for the default instance: a fresh Builder is already equivalent.
  return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}

@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
*
*
* <pre>
* Response for ListTranscriptEntries method.
* </pre>
*
* Protobuf type {@code google.apps.meet.v2.ListTranscriptEntriesResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.apps.meet.v2.ListTranscriptEntriesResponse)
com.google.apps.meet.v2.ListTranscriptEntriesResponseOrBuilder {
/** Returns the descriptor for this message type (shared with the outer message class). */
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.apps.meet.v2.ServiceProto
      .internal_static_google_apps_meet_v2_ListTranscriptEntriesResponse_descriptor;
}

@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.apps.meet.v2.ServiceProto
      .internal_static_google_apps_meet_v2_ListTranscriptEntriesResponse_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.apps.meet.v2.ListTranscriptEntriesResponse.class,
          com.google.apps.meet.v2.ListTranscriptEntriesResponse.Builder.class);
}

// Construct using com.google.apps.meet.v2.ListTranscriptEntriesResponse.newBuilder()
private Builder() {}

private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  super(parent);
}

/** Resets every field to its default; disposes any repeated-field builder state. */
@java.lang.Override
public Builder clear() {
  super.clear();
  bitField0_ = 0;
  if (transcriptEntriesBuilder_ == null) {
    transcriptEntries_ = java.util.Collections.emptyList();
  } else {
    transcriptEntries_ = null;
    transcriptEntriesBuilder_.clear();
  }
  bitField0_ = (bitField0_ & ~0x00000001);
  nextPageToken_ = "";
  return this;
}

@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
  return com.google.apps.meet.v2.ServiceProto
      .internal_static_google_apps_meet_v2_ListTranscriptEntriesResponse_descriptor;
}

@java.lang.Override
public com.google.apps.meet.v2.ListTranscriptEntriesResponse getDefaultInstanceForType() {
  return com.google.apps.meet.v2.ListTranscriptEntriesResponse.getDefaultInstance();
}

/** Builds the message, throwing if it is uninitialized (never for proto3). */
@java.lang.Override
public com.google.apps.meet.v2.ListTranscriptEntriesResponse build() {
  com.google.apps.meet.v2.ListTranscriptEntriesResponse result = buildPartial();
  if (!result.isInitialized()) {
    throw newUninitializedMessageException(result);
  }
  return result;
}

@java.lang.Override
public com.google.apps.meet.v2.ListTranscriptEntriesResponse buildPartial() {
  com.google.apps.meet.v2.ListTranscriptEntriesResponse result =
      new com.google.apps.meet.v2.ListTranscriptEntriesResponse(this);
  buildPartialRepeatedFields(result);
  if (bitField0_ != 0) {
    buildPartial0(result);
  }
  onBuilt();
  return result;
}

// Transfers the repeated field: freezes the builder-side list (clearing its
// "is mutable" bit 0x1) or delegates to the RepeatedFieldBuilder if one exists.
private void buildPartialRepeatedFields(
    com.google.apps.meet.v2.ListTranscriptEntriesResponse result) {
  if (transcriptEntriesBuilder_ == null) {
    if (((bitField0_ & 0x00000001) != 0)) {
      transcriptEntries_ = java.util.Collections.unmodifiableList(transcriptEntries_);
      bitField0_ = (bitField0_ & ~0x00000001);
    }
    result.transcriptEntries_ = transcriptEntries_;
  } else {
    result.transcriptEntries_ = transcriptEntriesBuilder_.build();
  }
}
private void buildPartial0(com.google.apps.meet.v2.ListTranscriptEntriesResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.apps.meet.v2.ListTranscriptEntriesResponse) {
return mergeFrom((com.google.apps.meet.v2.ListTranscriptEntriesResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.apps.meet.v2.ListTranscriptEntriesResponse other) {
if (other == com.google.apps.meet.v2.ListTranscriptEntriesResponse.getDefaultInstance())
return this;
if (transcriptEntriesBuilder_ == null) {
if (!other.transcriptEntries_.isEmpty()) {
if (transcriptEntries_.isEmpty()) {
transcriptEntries_ = other.transcriptEntries_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureTranscriptEntriesIsMutable();
transcriptEntries_.addAll(other.transcriptEntries_);
}
onChanged();
}
} else {
if (!other.transcriptEntries_.isEmpty()) {
if (transcriptEntriesBuilder_.isEmpty()) {
transcriptEntriesBuilder_.dispose();
transcriptEntriesBuilder_ = null;
transcriptEntries_ = other.transcriptEntries_;
bitField0_ = (bitField0_ & ~0x00000001);
transcriptEntriesBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getTranscriptEntriesFieldBuilder()
: null;
} else {
transcriptEntriesBuilder_.addAllMessages(other.transcriptEntries_);
}
}
}
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.apps.meet.v2.TranscriptEntry m =
input.readMessage(
com.google.apps.meet.v2.TranscriptEntry.parser(), extensionRegistry);
if (transcriptEntriesBuilder_ == null) {
ensureTranscriptEntriesIsMutable();
transcriptEntries_.add(m);
} else {
transcriptEntriesBuilder_.addMessage(m);
}
break;
} // case 10
case 18:
{
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.apps.meet.v2.TranscriptEntry> transcriptEntries_ =
java.util.Collections.emptyList();
private void ensureTranscriptEntriesIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
transcriptEntries_ =
new java.util.ArrayList<com.google.apps.meet.v2.TranscriptEntry>(transcriptEntries_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.apps.meet.v2.TranscriptEntry,
com.google.apps.meet.v2.TranscriptEntry.Builder,
com.google.apps.meet.v2.TranscriptEntryOrBuilder>
transcriptEntriesBuilder_;
/**
*
*
* <pre>
* List of TranscriptEntries in one page.
* </pre>
*
* <code>repeated .google.apps.meet.v2.TranscriptEntry transcript_entries = 1;</code>
*/
public java.util.List<com.google.apps.meet.v2.TranscriptEntry> getTranscriptEntriesList() {
if (transcriptEntriesBuilder_ == null) {
return java.util.Collections.unmodifiableList(transcriptEntries_);
} else {
return transcriptEntriesBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* List of TranscriptEntries in one page.
* </pre>
*
* <code>repeated .google.apps.meet.v2.TranscriptEntry transcript_entries = 1;</code>
*/
public int getTranscriptEntriesCount() {
if (transcriptEntriesBuilder_ == null) {
return transcriptEntries_.size();
} else {
return transcriptEntriesBuilder_.getCount();
}
}
/**
*
*
* <pre>
* List of TranscriptEntries in one page.
* </pre>
*
* <code>repeated .google.apps.meet.v2.TranscriptEntry transcript_entries = 1;</code>
*/
public com.google.apps.meet.v2.TranscriptEntry getTranscriptEntries(int index) {
if (transcriptEntriesBuilder_ == null) {
return transcriptEntries_.get(index);
} else {
return transcriptEntriesBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* List of TranscriptEntries in one page.
* </pre>
*
* <code>repeated .google.apps.meet.v2.TranscriptEntry transcript_entries = 1;</code>
*/
public Builder setTranscriptEntries(int index, com.google.apps.meet.v2.TranscriptEntry value) {
if (transcriptEntriesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureTranscriptEntriesIsMutable();
transcriptEntries_.set(index, value);
onChanged();
} else {
transcriptEntriesBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* List of TranscriptEntries in one page.
* </pre>
*
* <code>repeated .google.apps.meet.v2.TranscriptEntry transcript_entries = 1;</code>
*/
public Builder setTranscriptEntries(
int index, com.google.apps.meet.v2.TranscriptEntry.Builder builderForValue) {
if (transcriptEntriesBuilder_ == null) {
ensureTranscriptEntriesIsMutable();
transcriptEntries_.set(index, builderForValue.build());
onChanged();
} else {
transcriptEntriesBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* List of TranscriptEntries in one page.
* </pre>
*
* <code>repeated .google.apps.meet.v2.TranscriptEntry transcript_entries = 1;</code>
*/
public Builder addTranscriptEntries(com.google.apps.meet.v2.TranscriptEntry value) {
if (transcriptEntriesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureTranscriptEntriesIsMutable();
transcriptEntries_.add(value);
onChanged();
} else {
transcriptEntriesBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* List of TranscriptEntries in one page.
* </pre>
*
* <code>repeated .google.apps.meet.v2.TranscriptEntry transcript_entries = 1;</code>
*/
public Builder addTranscriptEntries(int index, com.google.apps.meet.v2.TranscriptEntry value) {
if (transcriptEntriesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureTranscriptEntriesIsMutable();
transcriptEntries_.add(index, value);
onChanged();
} else {
transcriptEntriesBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* List of TranscriptEntries in one page.
* </pre>
*
* <code>repeated .google.apps.meet.v2.TranscriptEntry transcript_entries = 1;</code>
*/
public Builder addTranscriptEntries(
com.google.apps.meet.v2.TranscriptEntry.Builder builderForValue) {
if (transcriptEntriesBuilder_ == null) {
ensureTranscriptEntriesIsMutable();
transcriptEntries_.add(builderForValue.build());
onChanged();
} else {
transcriptEntriesBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* List of TranscriptEntries in one page.
* </pre>
*
* <code>repeated .google.apps.meet.v2.TranscriptEntry transcript_entries = 1;</code>
*/
public Builder addTranscriptEntries(
int index, com.google.apps.meet.v2.TranscriptEntry.Builder builderForValue) {
if (transcriptEntriesBuilder_ == null) {
ensureTranscriptEntriesIsMutable();
transcriptEntries_.add(index, builderForValue.build());
onChanged();
} else {
transcriptEntriesBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* List of TranscriptEntries in one page.
* </pre>
*
* <code>repeated .google.apps.meet.v2.TranscriptEntry transcript_entries = 1;</code>
*/
public Builder addAllTranscriptEntries(
java.lang.Iterable<? extends com.google.apps.meet.v2.TranscriptEntry> values) {
if (transcriptEntriesBuilder_ == null) {
ensureTranscriptEntriesIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, transcriptEntries_);
onChanged();
} else {
transcriptEntriesBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* List of TranscriptEntries in one page.
* </pre>
*
* <code>repeated .google.apps.meet.v2.TranscriptEntry transcript_entries = 1;</code>
*/
public Builder clearTranscriptEntries() {
if (transcriptEntriesBuilder_ == null) {
transcriptEntries_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
transcriptEntriesBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* List of TranscriptEntries in one page.
* </pre>
*
* <code>repeated .google.apps.meet.v2.TranscriptEntry transcript_entries = 1;</code>
*/
public Builder removeTranscriptEntries(int index) {
if (transcriptEntriesBuilder_ == null) {
ensureTranscriptEntriesIsMutable();
transcriptEntries_.remove(index);
onChanged();
} else {
transcriptEntriesBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* List of TranscriptEntries in one page.
* </pre>
*
* <code>repeated .google.apps.meet.v2.TranscriptEntry transcript_entries = 1;</code>
*/
public com.google.apps.meet.v2.TranscriptEntry.Builder getTranscriptEntriesBuilder(int index) {
return getTranscriptEntriesFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* List of TranscriptEntries in one page.
* </pre>
*
* <code>repeated .google.apps.meet.v2.TranscriptEntry transcript_entries = 1;</code>
*/
public com.google.apps.meet.v2.TranscriptEntryOrBuilder getTranscriptEntriesOrBuilder(
int index) {
if (transcriptEntriesBuilder_ == null) {
return transcriptEntries_.get(index);
} else {
return transcriptEntriesBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* List of TranscriptEntries in one page.
* </pre>
*
* <code>repeated .google.apps.meet.v2.TranscriptEntry transcript_entries = 1;</code>
*/
public java.util.List<? extends com.google.apps.meet.v2.TranscriptEntryOrBuilder>
getTranscriptEntriesOrBuilderList() {
if (transcriptEntriesBuilder_ != null) {
return transcriptEntriesBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(transcriptEntries_);
}
}
/**
*
*
* <pre>
* List of TranscriptEntries in one page.
* </pre>
*
* <code>repeated .google.apps.meet.v2.TranscriptEntry transcript_entries = 1;</code>
*/
public com.google.apps.meet.v2.TranscriptEntry.Builder addTranscriptEntriesBuilder() {
return getTranscriptEntriesFieldBuilder()
.addBuilder(com.google.apps.meet.v2.TranscriptEntry.getDefaultInstance());
}
/**
*
*
* <pre>
* List of TranscriptEntries in one page.
* </pre>
*
* <code>repeated .google.apps.meet.v2.TranscriptEntry transcript_entries = 1;</code>
*/
public com.google.apps.meet.v2.TranscriptEntry.Builder addTranscriptEntriesBuilder(int index) {
return getTranscriptEntriesFieldBuilder()
.addBuilder(index, com.google.apps.meet.v2.TranscriptEntry.getDefaultInstance());
}
/**
*
*
* <pre>
* List of TranscriptEntries in one page.
* </pre>
*
* <code>repeated .google.apps.meet.v2.TranscriptEntry transcript_entries = 1;</code>
*/
public java.util.List<com.google.apps.meet.v2.TranscriptEntry.Builder>
getTranscriptEntriesBuilderList() {
return getTranscriptEntriesFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.apps.meet.v2.TranscriptEntry,
com.google.apps.meet.v2.TranscriptEntry.Builder,
com.google.apps.meet.v2.TranscriptEntryOrBuilder>
getTranscriptEntriesFieldBuilder() {
if (transcriptEntriesBuilder_ == null) {
transcriptEntriesBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.apps.meet.v2.TranscriptEntry,
com.google.apps.meet.v2.TranscriptEntry.Builder,
com.google.apps.meet.v2.TranscriptEntryOrBuilder>(
transcriptEntries_,
((bitField0_ & 0x00000001) != 0),
getParentForChildren(),
isClean());
transcriptEntries_ = null;
}
return transcriptEntriesBuilder_;
}
private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* Token to be circulated back for further List call if current List doesn't
* include all the transcript entries. Unset if all entries are returned.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Token to be circulated back for further List call if current List doesn't
* include all the transcript entries. Unset if all entries are returned.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Token to be circulated back for further List call if current List doesn't
* include all the transcript entries. Unset if all entries are returned.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Token to be circulated back for further List call if current List doesn't
* include all the transcript entries. Unset if all entries are returned.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* Token to be circulated back for further List call if current List doesn't
* include all the transcript entries. Unset if all entries are returned.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.apps.meet.v2.ListTranscriptEntriesResponse)
}
  // @@protoc_insertion_point(class_scope:google.apps.meet.v2.ListTranscriptEntriesResponse)
  // Shared immutable default instance, created eagerly at class-load time.
  private static final com.google.apps.meet.v2.ListTranscriptEntriesResponse DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.apps.meet.v2.ListTranscriptEntriesResponse();
  }
  // Returns the shared immutable default instance of this message type.
  public static com.google.apps.meet.v2.ListTranscriptEntriesResponse getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Wire-format parser. On failure, the exception carries the partially-built
  // message (setUnfinishedMessage) so callers can inspect what was decoded.
  private static final com.google.protobuf.Parser<ListTranscriptEntriesResponse> PARSER =
      new com.google.protobuf.AbstractParser<ListTranscriptEntriesResponse>() {
        @java.lang.Override
        public ListTranscriptEntriesResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O failures in the protobuf-specific exception type.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  // Static accessor for the shared parser instance.
  public static com.google.protobuf.Parser<ListTranscriptEntriesResponse> parser() {
    return PARSER;
  }
  // Instance-level parser accessor required by the Message contract.
  @java.lang.Override
  public com.google.protobuf.Parser<ListTranscriptEntriesResponse> getParserForType() {
    return PARSER;
  }
  // Instance-level accessor for the shared default instance.
  @java.lang.Override
  public com.google.apps.meet.v2.ListTranscriptEntriesResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 37,951 | java-aiplatform/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/LlmUtilityServiceClient.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.aiplatform.v1beta1;
import com.google.api.core.ApiFuture;
import com.google.api.core.ApiFutures;
import com.google.api.core.BetaApi;
import com.google.api.gax.core.BackgroundResource;
import com.google.api.gax.paging.AbstractFixedSizeCollection;
import com.google.api.gax.paging.AbstractPage;
import com.google.api.gax.paging.AbstractPagedListResponse;
import com.google.api.gax.rpc.PageContext;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.cloud.aiplatform.v1beta1.stub.LlmUtilityServiceStub;
import com.google.cloud.aiplatform.v1beta1.stub.LlmUtilityServiceStubSettings;
import com.google.cloud.location.GetLocationRequest;
import com.google.cloud.location.ListLocationsRequest;
import com.google.cloud.location.ListLocationsResponse;
import com.google.cloud.location.Location;
import com.google.common.util.concurrent.MoreExecutors;
import com.google.iam.v1.GetIamPolicyRequest;
import com.google.iam.v1.Policy;
import com.google.iam.v1.SetIamPolicyRequest;
import com.google.iam.v1.TestIamPermissionsRequest;
import com.google.iam.v1.TestIamPermissionsResponse;
import com.google.protobuf.Value;
import java.io.IOException;
import java.util.List;
import java.util.concurrent.TimeUnit;
import javax.annotation.Generated;
// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
* Service Description: Service for LLM related utility functions.
*
* <p>This class provides the ability to make remote calls to the backing service through method
* calls that map to API methods. Sample code to get started:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (LlmUtilityServiceClient llmUtilityServiceClient = LlmUtilityServiceClient.create()) {
* EndpointName endpoint =
* EndpointName.ofProjectLocationEndpointName("[PROJECT]", "[LOCATION]", "[ENDPOINT]");
* List<Value> instances = new ArrayList<>();
* ComputeTokensResponse response = llmUtilityServiceClient.computeTokens(endpoint, instances);
* }
* }</pre>
*
* <p>Note: close() needs to be called on the LlmUtilityServiceClient object to clean up resources
* such as threads. In the example above, try-with-resources is used, which automatically calls
* close().
*
* <table>
* <caption>Methods</caption>
* <tr>
* <th>Method</th>
* <th>Description</th>
* <th>Method Variants</th>
* </tr>
* <tr>
* <td><p> ComputeTokens</td>
* <td><p> Return a list of tokens based on the input text.</td>
* <td>
* <p>Request object method variants only take one parameter, a request object, which must be constructed before the call.</p>
* <ul>
* <li><p> computeTokens(ComputeTokensRequest request)
* </ul>
* <p>"Flattened" method variants have converted the fields of the request object into function parameters to enable multiple ways to call the same method.</p>
* <ul>
* <li><p> computeTokens(EndpointName endpoint, List<Value> instances)
* <li><p> computeTokens(String endpoint, List<Value> instances)
* </ul>
* <p>Callable method variants take no parameters and return an immutable API callable object, which can be used to initiate calls to the service.</p>
* <ul>
* <li><p> computeTokensCallable()
* </ul>
* </td>
* </tr>
* <tr>
* <td><p> ListLocations</td>
* <td><p> Lists information about the supported locations for this service.</td>
* <td>
* <p>Request object method variants only take one parameter, a request object, which must be constructed before the call.</p>
* <ul>
* <li><p> listLocations(ListLocationsRequest request)
* </ul>
* <p>Callable method variants take no parameters and return an immutable API callable object, which can be used to initiate calls to the service.</p>
* <ul>
* <li><p> listLocationsPagedCallable()
* <li><p> listLocationsCallable()
* </ul>
* </td>
* </tr>
* <tr>
* <td><p> GetLocation</td>
* <td><p> Gets information about a location.</td>
* <td>
* <p>Request object method variants only take one parameter, a request object, which must be constructed before the call.</p>
* <ul>
* <li><p> getLocation(GetLocationRequest request)
* </ul>
* <p>Callable method variants take no parameters and return an immutable API callable object, which can be used to initiate calls to the service.</p>
* <ul>
* <li><p> getLocationCallable()
* </ul>
* </td>
* </tr>
* <tr>
* <td><p> SetIamPolicy</td>
 * <td><p> Sets the access control policy on the specified resource. Replaces any existing policy.
 * <p> Can return `NOT_FOUND`, `INVALID_ARGUMENT`, and `PERMISSION_DENIED` errors.</td>
* <td>
* <p>Request object method variants only take one parameter, a request object, which must be constructed before the call.</p>
* <ul>
* <li><p> setIamPolicy(SetIamPolicyRequest request)
* </ul>
* <p>Callable method variants take no parameters and return an immutable API callable object, which can be used to initiate calls to the service.</p>
* <ul>
* <li><p> setIamPolicyCallable()
* </ul>
* </td>
* </tr>
* <tr>
* <td><p> GetIamPolicy</td>
 * <td><p> Gets the access control policy for a resource. Returns an empty policy if the resource exists and does not have a policy set.</td>
* <td>
* <p>Request object method variants only take one parameter, a request object, which must be constructed before the call.</p>
* <ul>
* <li><p> getIamPolicy(GetIamPolicyRequest request)
* </ul>
* <p>Callable method variants take no parameters and return an immutable API callable object, which can be used to initiate calls to the service.</p>
* <ul>
* <li><p> getIamPolicyCallable()
* </ul>
* </td>
* </tr>
* <tr>
* <td><p> TestIamPermissions</td>
 * <td><p> Returns permissions that a caller has on the specified resource. If the resource does not exist, this will return an empty set of permissions, not a `NOT_FOUND` error.
 * <p> Note: This operation is designed to be used for building permission-aware UIs and command-line tools, not for authorization checking. This operation may "fail open" without warning.</td>
* <td>
* <p>Request object method variants only take one parameter, a request object, which must be constructed before the call.</p>
* <ul>
* <li><p> testIamPermissions(TestIamPermissionsRequest request)
* </ul>
* <p>Callable method variants take no parameters and return an immutable API callable object, which can be used to initiate calls to the service.</p>
* <ul>
* <li><p> testIamPermissionsCallable()
* </ul>
* </td>
* </tr>
* </table>
*
* <p>See the individual methods for example code.
*
* <p>Many parameters require resource names to be formatted in a particular way. To assist with
* these names, this class includes a format method for each type of name, and additionally a parse
* method to extract the individual identifiers contained within names that are returned.
*
* <p>This class can be customized by passing in a custom instance of LlmUtilityServiceSettings to
* create(). For example:
*
* <p>To customize credentials:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* LlmUtilityServiceSettings llmUtilityServiceSettings =
* LlmUtilityServiceSettings.newBuilder()
* .setCredentialsProvider(FixedCredentialsProvider.create(myCredentials))
* .build();
* LlmUtilityServiceClient llmUtilityServiceClient =
* LlmUtilityServiceClient.create(llmUtilityServiceSettings);
* }</pre>
*
* <p>To customize the endpoint:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* LlmUtilityServiceSettings llmUtilityServiceSettings =
* LlmUtilityServiceSettings.newBuilder().setEndpoint(myEndpoint).build();
* LlmUtilityServiceClient llmUtilityServiceClient =
* LlmUtilityServiceClient.create(llmUtilityServiceSettings);
* }</pre>
*
* <p>Please refer to the GitHub repository's samples for more quickstart code snippets.
*/
@BetaApi
@Generated("by gapic-generator-java")
public class LlmUtilityServiceClient implements BackgroundResource {
private final LlmUtilityServiceSettings settings;
private final LlmUtilityServiceStub stub;
  /**
   * Constructs an instance of LlmUtilityServiceClient with default settings.
   *
   * @throws IOException if the underlying stub cannot be created
   */
  public static final LlmUtilityServiceClient create() throws IOException {
    return create(LlmUtilityServiceSettings.newBuilder().build());
  }
  /**
   * Constructs an instance of LlmUtilityServiceClient, using the given settings. The channels are
   * created based on the settings passed in, or defaults for any settings that are not set.
   *
   * @param settings the client settings used to configure the underlying transport stub
   * @throws IOException if the underlying stub cannot be created
   */
  public static final LlmUtilityServiceClient create(LlmUtilityServiceSettings settings)
      throws IOException {
    return new LlmUtilityServiceClient(settings);
  }
  /**
   * Constructs an instance of LlmUtilityServiceClient, using the given stub for making calls. This
   * is for advanced usage - prefer using create(LlmUtilityServiceSettings).
   *
   * @param stub the pre-built transport stub the client should call through
   */
  public static final LlmUtilityServiceClient create(LlmUtilityServiceStub stub) {
    return new LlmUtilityServiceClient(stub);
  }
  /**
   * Constructs an instance of LlmUtilityServiceClient, using the given settings. This is protected
   * so that it is easy to make a subclass, but otherwise, the static factory methods should be
   * preferred.
   *
   * @throws IOException if creating the stub from the settings fails
   */
  protected LlmUtilityServiceClient(LlmUtilityServiceSettings settings) throws IOException {
    this.settings = settings;
    // The stub is derived from the settings' stub-settings object.
    this.stub = ((LlmUtilityServiceStubSettings) settings.getStubSettings()).createStub();
  }
  // Stub-based constructor: no settings object is available in this mode, so
  // getSettings() will return null for clients built this way.
  protected LlmUtilityServiceClient(LlmUtilityServiceStub stub) {
    this.settings = null;
    this.stub = stub;
  }
  /** Returns the settings this client was created with, or null if it was built from a raw stub. */
  public final LlmUtilityServiceSettings getSettings() {
    return settings;
  }
  /** Returns the underlying transport stub used to make RPC calls. */
  public LlmUtilityServiceStub getStub() {
    return stub;
  }
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Return a list of tokens based on the input text.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (LlmUtilityServiceClient llmUtilityServiceClient = LlmUtilityServiceClient.create()) {
* EndpointName endpoint =
* EndpointName.ofProjectLocationEndpointName("[PROJECT]", "[LOCATION]", "[ENDPOINT]");
* List<Value> instances = new ArrayList<>();
* ComputeTokensResponse response = llmUtilityServiceClient.computeTokens(endpoint, instances);
* }
* }</pre>
*
* @param endpoint Required. The name of the Endpoint requested to get lists of tokens and token
* ids.
* @param instances Optional. The instances that are the input to token computing API call. Schema
* is identical to the prediction schema of the text model, even for the non-text models, like
* chat models, or Codey models.
* @throws com.google.api.gax.rpc.ApiException if the remote call fails
*/
public final ComputeTokensResponse computeTokens(EndpointName endpoint, List<Value> instances) {
ComputeTokensRequest request =
ComputeTokensRequest.newBuilder()
.setEndpoint(endpoint == null ? null : endpoint.toString())
.addAllInstances(instances)
.build();
return computeTokens(request);
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Return a list of tokens based on the input text.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (LlmUtilityServiceClient llmUtilityServiceClient = LlmUtilityServiceClient.create()) {
* String endpoint =
* EndpointName.ofProjectLocationEndpointName("[PROJECT]", "[LOCATION]", "[ENDPOINT]")
* .toString();
* List<Value> instances = new ArrayList<>();
* ComputeTokensResponse response = llmUtilityServiceClient.computeTokens(endpoint, instances);
* }
* }</pre>
*
* @param endpoint Required. The name of the Endpoint requested to get lists of tokens and token
* ids.
* @param instances Optional. The instances that are the input to token computing API call. Schema
* is identical to the prediction schema of the text model, even for the non-text models, like
* chat models, or Codey models.
* @throws com.google.api.gax.rpc.ApiException if the remote call fails
*/
public final ComputeTokensResponse computeTokens(String endpoint, List<Value> instances) {
ComputeTokensRequest request =
ComputeTokensRequest.newBuilder().setEndpoint(endpoint).addAllInstances(instances).build();
return computeTokens(request);
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Return a list of tokens based on the input text.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (LlmUtilityServiceClient llmUtilityServiceClient = LlmUtilityServiceClient.create()) {
* ComputeTokensRequest request =
* ComputeTokensRequest.newBuilder()
* .setEndpoint(
* EndpointName.ofProjectLocationEndpointName(
* "[PROJECT]", "[LOCATION]", "[ENDPOINT]")
* .toString())
* .addAllInstances(new ArrayList<Value>())
* .setModel("model104069929")
* .addAllContents(new ArrayList<Content>())
* .build();
* ComputeTokensResponse response = llmUtilityServiceClient.computeTokens(request);
* }
* }</pre>
*
* @param request The request object containing all of the parameters for the API call.
* @throws com.google.api.gax.rpc.ApiException if the remote call fails
*/
public final ComputeTokensResponse computeTokens(ComputeTokensRequest request) {
return computeTokensCallable().call(request);
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Return a list of tokens based on the input text.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (LlmUtilityServiceClient llmUtilityServiceClient = LlmUtilityServiceClient.create()) {
* ComputeTokensRequest request =
* ComputeTokensRequest.newBuilder()
* .setEndpoint(
* EndpointName.ofProjectLocationEndpointName(
* "[PROJECT]", "[LOCATION]", "[ENDPOINT]")
* .toString())
* .addAllInstances(new ArrayList<Value>())
* .setModel("model104069929")
* .addAllContents(new ArrayList<Content>())
* .build();
* ApiFuture<ComputeTokensResponse> future =
* llmUtilityServiceClient.computeTokensCallable().futureCall(request);
* // Do something.
* ComputeTokensResponse response = future.get();
* }
* }</pre>
*/
public final UnaryCallable<ComputeTokensRequest, ComputeTokensResponse> computeTokensCallable() {
return stub.computeTokensCallable();
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Lists information about the supported locations for this service.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (LlmUtilityServiceClient llmUtilityServiceClient = LlmUtilityServiceClient.create()) {
* ListLocationsRequest request =
* ListLocationsRequest.newBuilder()
* .setName("name3373707")
* .setFilter("filter-1274492040")
* .setPageSize(883849137)
* .setPageToken("pageToken873572522")
* .build();
* for (Location element : llmUtilityServiceClient.listLocations(request).iterateAll()) {
* // doThingsWith(element);
* }
* }
* }</pre>
*
* @param request The request object containing all of the parameters for the API call.
* @throws com.google.api.gax.rpc.ApiException if the remote call fails
*/
public final ListLocationsPagedResponse listLocations(ListLocationsRequest request) {
return listLocationsPagedCallable().call(request);
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Lists information about the supported locations for this service.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (LlmUtilityServiceClient llmUtilityServiceClient = LlmUtilityServiceClient.create()) {
* ListLocationsRequest request =
* ListLocationsRequest.newBuilder()
* .setName("name3373707")
* .setFilter("filter-1274492040")
* .setPageSize(883849137)
* .setPageToken("pageToken873572522")
* .build();
* ApiFuture<Location> future =
* llmUtilityServiceClient.listLocationsPagedCallable().futureCall(request);
* // Do something.
* for (Location element : future.get().iterateAll()) {
* // doThingsWith(element);
* }
* }
* }</pre>
*/
public final UnaryCallable<ListLocationsRequest, ListLocationsPagedResponse>
listLocationsPagedCallable() {
return stub.listLocationsPagedCallable();
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Lists information about the supported locations for this service.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (LlmUtilityServiceClient llmUtilityServiceClient = LlmUtilityServiceClient.create()) {
* ListLocationsRequest request =
* ListLocationsRequest.newBuilder()
* .setName("name3373707")
* .setFilter("filter-1274492040")
* .setPageSize(883849137)
* .setPageToken("pageToken873572522")
* .build();
* while (true) {
* ListLocationsResponse response =
* llmUtilityServiceClient.listLocationsCallable().call(request);
* for (Location element : response.getLocationsList()) {
* // doThingsWith(element);
* }
* String nextPageToken = response.getNextPageToken();
* if (!Strings.isNullOrEmpty(nextPageToken)) {
* request = request.toBuilder().setPageToken(nextPageToken).build();
* } else {
* break;
* }
* }
* }
* }</pre>
*/
public final UnaryCallable<ListLocationsRequest, ListLocationsResponse> listLocationsCallable() {
return stub.listLocationsCallable();
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Gets information about a location.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (LlmUtilityServiceClient llmUtilityServiceClient = LlmUtilityServiceClient.create()) {
* GetLocationRequest request = GetLocationRequest.newBuilder().setName("name3373707").build();
* Location response = llmUtilityServiceClient.getLocation(request);
* }
* }</pre>
*
* @param request The request object containing all of the parameters for the API call.
* @throws com.google.api.gax.rpc.ApiException if the remote call fails
*/
public final Location getLocation(GetLocationRequest request) {
return getLocationCallable().call(request);
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Gets information about a location.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (LlmUtilityServiceClient llmUtilityServiceClient = LlmUtilityServiceClient.create()) {
* GetLocationRequest request = GetLocationRequest.newBuilder().setName("name3373707").build();
* ApiFuture<Location> future =
* llmUtilityServiceClient.getLocationCallable().futureCall(request);
* // Do something.
* Location response = future.get();
* }
* }</pre>
*/
public final UnaryCallable<GetLocationRequest, Location> getLocationCallable() {
return stub.getLocationCallable();
}
  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Sets the access control policy on the specified resource. Replaces any existing policy.
   *
   * <p>Can return `NOT_FOUND`, `INVALID_ARGUMENT`, and `PERMISSION_DENIED` errors.
   *
   * <p>Sample code:
   *
   * <pre>{@code
   * // This snippet has been automatically generated and should be regarded as a code template only.
   * // It will require modifications to work:
   * // - It may require correct/in-range values for request initialization.
   * // - It may require specifying regional endpoints when creating the service client as shown in
   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
   * try (LlmUtilityServiceClient llmUtilityServiceClient = LlmUtilityServiceClient.create()) {
   *   SetIamPolicyRequest request =
   *       SetIamPolicyRequest.newBuilder()
   *           .setResource(
   *               EndpointName.ofProjectLocationEndpointName(
   *                       "[PROJECT]", "[LOCATION]", "[ENDPOINT]")
   *                   .toString())
   *           .setPolicy(Policy.newBuilder().build())
   *           .setUpdateMask(FieldMask.newBuilder().build())
   *           .build();
   *   Policy response = llmUtilityServiceClient.setIamPolicy(request);
   * }
   * }</pre>
   *
   * @param request The request object containing all of the parameters for the API call.
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
   */
  public final Policy setIamPolicy(SetIamPolicyRequest request) {
    return setIamPolicyCallable().call(request);
  }
  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Sets the access control policy on the specified resource. Replaces any existing policy.
   *
   * <p>Can return `NOT_FOUND`, `INVALID_ARGUMENT`, and `PERMISSION_DENIED` errors.
   *
   * <p>Sample code:
   *
   * <pre>{@code
   * // This snippet has been automatically generated and should be regarded as a code template only.
   * // It will require modifications to work:
   * // - It may require correct/in-range values for request initialization.
   * // - It may require specifying regional endpoints when creating the service client as shown in
   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
   * try (LlmUtilityServiceClient llmUtilityServiceClient = LlmUtilityServiceClient.create()) {
   *   SetIamPolicyRequest request =
   *       SetIamPolicyRequest.newBuilder()
   *           .setResource(
   *               EndpointName.ofProjectLocationEndpointName(
   *                       "[PROJECT]", "[LOCATION]", "[ENDPOINT]")
   *                   .toString())
   *           .setPolicy(Policy.newBuilder().build())
   *           .setUpdateMask(FieldMask.newBuilder().build())
   *           .build();
   *   ApiFuture<Policy> future = llmUtilityServiceClient.setIamPolicyCallable().futureCall(request);
   *   // Do something.
   *   Policy response = future.get();
   * }
   * }</pre>
   */
  public final UnaryCallable<SetIamPolicyRequest, Policy> setIamPolicyCallable() {
    return stub.setIamPolicyCallable();
  }
  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Gets the access control policy for a resource. Returns an empty policy if the resource exists
   * and does not have a policy set.
   *
   * <p>Sample code:
   *
   * <pre>{@code
   * // This snippet has been automatically generated and should be regarded as a code template only.
   * // It will require modifications to work:
   * // - It may require correct/in-range values for request initialization.
   * // - It may require specifying regional endpoints when creating the service client as shown in
   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
   * try (LlmUtilityServiceClient llmUtilityServiceClient = LlmUtilityServiceClient.create()) {
   *   GetIamPolicyRequest request =
   *       GetIamPolicyRequest.newBuilder()
   *           .setResource(
   *               EndpointName.ofProjectLocationEndpointName(
   *                       "[PROJECT]", "[LOCATION]", "[ENDPOINT]")
   *                   .toString())
   *           .setOptions(GetPolicyOptions.newBuilder().build())
   *           .build();
   *   Policy response = llmUtilityServiceClient.getIamPolicy(request);
   * }
   * }</pre>
   *
   * @param request The request object containing all of the parameters for the API call.
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
   */
  public final Policy getIamPolicy(GetIamPolicyRequest request) {
    return getIamPolicyCallable().call(request);
  }
  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Gets the access control policy for a resource. Returns an empty policy if the resource exists
   * and does not have a policy set.
   *
   * <p>Sample code:
   *
   * <pre>{@code
   * // This snippet has been automatically generated and should be regarded as a code template only.
   * // It will require modifications to work:
   * // - It may require correct/in-range values for request initialization.
   * // - It may require specifying regional endpoints when creating the service client as shown in
   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
   * try (LlmUtilityServiceClient llmUtilityServiceClient = LlmUtilityServiceClient.create()) {
   *   GetIamPolicyRequest request =
   *       GetIamPolicyRequest.newBuilder()
   *           .setResource(
   *               EndpointName.ofProjectLocationEndpointName(
   *                       "[PROJECT]", "[LOCATION]", "[ENDPOINT]")
   *                   .toString())
   *           .setOptions(GetPolicyOptions.newBuilder().build())
   *           .build();
   *   ApiFuture<Policy> future = llmUtilityServiceClient.getIamPolicyCallable().futureCall(request);
   *   // Do something.
   *   Policy response = future.get();
   * }
   * }</pre>
   */
  public final UnaryCallable<GetIamPolicyRequest, Policy> getIamPolicyCallable() {
    return stub.getIamPolicyCallable();
  }
  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Returns permissions that a caller has on the specified resource. If the resource does not
   * exist, this will return an empty set of permissions, not a `NOT_FOUND` error.
   *
   * <p>Note: This operation is designed to be used for building permission-aware UIs and
   * command-line tools, not for authorization checking. This operation may "fail open" without
   * warning.
   *
   * <p>Sample code:
   *
   * <pre>{@code
   * // This snippet has been automatically generated and should be regarded as a code template only.
   * // It will require modifications to work:
   * // - It may require correct/in-range values for request initialization.
   * // - It may require specifying regional endpoints when creating the service client as shown in
   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
   * try (LlmUtilityServiceClient llmUtilityServiceClient = LlmUtilityServiceClient.create()) {
   *   TestIamPermissionsRequest request =
   *       TestIamPermissionsRequest.newBuilder()
   *           .setResource(
   *               EndpointName.ofProjectLocationEndpointName(
   *                       "[PROJECT]", "[LOCATION]", "[ENDPOINT]")
   *                   .toString())
   *           .addAllPermissions(new ArrayList<String>())
   *           .build();
   *   TestIamPermissionsResponse response = llmUtilityServiceClient.testIamPermissions(request);
   * }
   * }</pre>
   *
   * @param request The request object containing all of the parameters for the API call.
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
   */
  public final TestIamPermissionsResponse testIamPermissions(TestIamPermissionsRequest request) {
    return testIamPermissionsCallable().call(request);
  }
  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Returns permissions that a caller has on the specified resource. If the resource does not
   * exist, this will return an empty set of permissions, not a `NOT_FOUND` error.
   *
   * <p>Note: This operation is designed to be used for building permission-aware UIs and
   * command-line tools, not for authorization checking. This operation may "fail open" without
   * warning.
   *
   * <p>Sample code:
   *
   * <pre>{@code
   * // This snippet has been automatically generated and should be regarded as a code template only.
   * // It will require modifications to work:
   * // - It may require correct/in-range values for request initialization.
   * // - It may require specifying regional endpoints when creating the service client as shown in
   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
   * try (LlmUtilityServiceClient llmUtilityServiceClient = LlmUtilityServiceClient.create()) {
   *   TestIamPermissionsRequest request =
   *       TestIamPermissionsRequest.newBuilder()
   *           .setResource(
   *               EndpointName.ofProjectLocationEndpointName(
   *                       "[PROJECT]", "[LOCATION]", "[ENDPOINT]")
   *                   .toString())
   *           .addAllPermissions(new ArrayList<String>())
   *           .build();
   *   ApiFuture<TestIamPermissionsResponse> future =
   *       llmUtilityServiceClient.testIamPermissionsCallable().futureCall(request);
   *   // Do something.
   *   TestIamPermissionsResponse response = future.get();
   * }
   * }</pre>
   */
  public final UnaryCallable<TestIamPermissionsRequest, TestIamPermissionsResponse>
      testIamPermissionsCallable() {
    return stub.testIamPermissionsCallable();
  }
  /** Closes the underlying stub and releases its resources; the client is unusable afterwards. */
  @Override
  public final void close() {
    stub.close();
  }
  /** Delegates to the stub's {@code shutdown}, initiating an orderly shutdown. */
  @Override
  public void shutdown() {
    stub.shutdown();
  }
  /** Reports whether shutdown of the underlying stub has been initiated. */
  @Override
  public boolean isShutdown() {
    return stub.isShutdown();
  }
  /** Reports whether the underlying stub has finished terminating. */
  @Override
  public boolean isTerminated() {
    return stub.isTerminated();
  }
  /** Delegates to the stub's {@code shutdownNow}, initiating an immediate shutdown. */
  @Override
  public void shutdownNow() {
    stub.shutdownNow();
  }
  /**
   * Delegates to the stub's {@code awaitTermination}, waiting up to the given timeout.
   *
   * @return the result of the stub's {@code awaitTermination} call
   * @throws InterruptedException if the calling thread is interrupted while waiting
   */
  @Override
  public boolean awaitTermination(long duration, TimeUnit unit) throws InterruptedException {
    return stub.awaitTermination(duration, unit);
  }
public static class ListLocationsPagedResponse
extends AbstractPagedListResponse<
ListLocationsRequest,
ListLocationsResponse,
Location,
ListLocationsPage,
ListLocationsFixedSizeCollection> {
public static ApiFuture<ListLocationsPagedResponse> createAsync(
PageContext<ListLocationsRequest, ListLocationsResponse, Location> context,
ApiFuture<ListLocationsResponse> futureResponse) {
ApiFuture<ListLocationsPage> futurePage =
ListLocationsPage.createEmptyPage().createPageAsync(context, futureResponse);
return ApiFutures.transform(
futurePage,
input -> new ListLocationsPagedResponse(input),
MoreExecutors.directExecutor());
}
private ListLocationsPagedResponse(ListLocationsPage page) {
super(page, ListLocationsFixedSizeCollection.createEmptyCollection());
}
}
  /** A single page of {@code ListLocations} results. */
  public static class ListLocationsPage
      extends AbstractPage<
          ListLocationsRequest, ListLocationsResponse, Location, ListLocationsPage> {
    private ListLocationsPage(
        PageContext<ListLocationsRequest, ListLocationsResponse, Location> context,
        ListLocationsResponse response) {
      super(context, response);
    }
    // Sentinel page (no context/response) used purely as a factory for real pages.
    private static ListLocationsPage createEmptyPage() {
      return new ListLocationsPage(null, null);
    }
    @Override
    protected ListLocationsPage createPage(
        PageContext<ListLocationsRequest, ListLocationsResponse, Location> context,
        ListLocationsResponse response) {
      return new ListLocationsPage(context, response);
    }
    @Override
    public ApiFuture<ListLocationsPage> createPageAsync(
        PageContext<ListLocationsRequest, ListLocationsResponse, Location> context,
        ApiFuture<ListLocationsResponse> futureResponse) {
      return super.createPageAsync(context, futureResponse);
    }
  }
  /** A fixed-size collection view over one or more pages of {@code ListLocations} results. */
  public static class ListLocationsFixedSizeCollection
      extends AbstractFixedSizeCollection<
          ListLocationsRequest,
          ListLocationsResponse,
          Location,
          ListLocationsPage,
          ListLocationsFixedSizeCollection> {
    private ListLocationsFixedSizeCollection(List<ListLocationsPage> pages, int collectionSize) {
      super(pages, collectionSize);
    }
    // Empty sentinel used as the starting collection for the paged response.
    private static ListLocationsFixedSizeCollection createEmptyCollection() {
      return new ListLocationsFixedSizeCollection(null, 0);
    }
    @Override
    protected ListLocationsFixedSizeCollection createCollection(
        List<ListLocationsPage> pages, int collectionSize) {
      return new ListLocationsFixedSizeCollection(pages, collectionSize);
    }
  }
}
|
google/guava | 37,885 | android/guava/src/com/google/common/collect/Streams.java | /*
* Copyright (C) 2015 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You may
* obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.google.common.collect;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.collect.NullnessCasts.uncheckedCastNullableTToT;
import static com.google.common.collect.SneakyThrows.sneakyThrow;
import static java.lang.Math.min;
import static java.util.Objects.requireNonNull;
import com.google.common.annotations.Beta;
import com.google.common.annotations.GwtCompatible;
import com.google.common.math.LongMath;
import com.google.errorprone.annotations.InlineMe;
import com.google.errorprone.annotations.InlineMeValidationDisabled;
import java.util.ArrayDeque;
import java.util.Collection;
import java.util.Deque;
import java.util.Iterator;
import java.util.OptionalDouble;
import java.util.OptionalInt;
import java.util.OptionalLong;
import java.util.PrimitiveIterator;
import java.util.Spliterator;
import java.util.Spliterators;
import java.util.Spliterators.AbstractSpliterator;
import java.util.function.BiConsumer;
import java.util.function.BiFunction;
import java.util.function.Consumer;
import java.util.function.DoubleConsumer;
import java.util.function.IntConsumer;
import java.util.function.LongConsumer;
import java.util.stream.BaseStream;
import java.util.stream.DoubleStream;
import java.util.stream.IntStream;
import java.util.stream.LongStream;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;
import org.jspecify.annotations.Nullable;
/**
* Static utility methods related to {@code Stream} instances.
*
* @since 33.4.0 (but since 21.0 in the JRE flavor)
*/
@GwtCompatible
/*
* Users will use most of these methods only if they're already using Stream. For a few other
* methods, like stream(Iterable), we have to rely on users not to call them without library
* desugaring.
*/
@IgnoreJRERequirement
public final class Streams {
/**
* Returns a sequential {@link Stream} of the contents of {@code iterable}, delegating to {@link
* Collection#stream} if possible.
*/
public static <T extends @Nullable Object> Stream<T> stream(Iterable<T> iterable) {
return (iterable instanceof Collection)
? ((Collection<T>) iterable).stream()
: StreamSupport.stream(iterable.spliterator(), false);
}
  /**
   * Returns {@link Collection#stream}.
   *
   * <p>The {@code @InlineMe} annotation below lets tooling migrate callers to the direct
   * {@code collection.stream()} call automatically; the method body must stay identical to that
   * replacement expression.
   *
   * @deprecated There is no reason to use this; just invoke {@code collection.stream()} directly.
   */
  @Deprecated
  @InlineMe(replacement = "collection.stream()")
  public static <T extends @Nullable Object> Stream<T> stream(Collection<T> collection) {
    return collection.stream();
  }
/**
* Returns a sequential {@link Stream} of the remaining contents of {@code iterator}. Do not use
* {@code iterator} directly after passing it to this method.
*/
public static <T extends @Nullable Object> Stream<T> stream(Iterator<T> iterator) {
return StreamSupport.stream(Spliterators.spliteratorUnknownSize(iterator, 0), false);
}
/**
* If a value is present in {@code optional}, returns a stream containing only that element,
* otherwise returns an empty stream.
*/
public static <T> Stream<T> stream(com.google.common.base.Optional<T> optional) {
return optional.isPresent() ? Stream.of(optional.get()) : Stream.empty();
}
  /**
   * If a value is present in {@code optional}, returns a stream containing only that element,
   * otherwise returns an empty stream.
   *
   * <p><b>Java 9 users:</b> use {@code optional.stream()} instead.
   */
  @Beta
  @InlineMe(replacement = "optional.stream()")
  // Inlining validation is disabled because the replacement, Optional.stream(), is a Java 9+ API.
  @InlineMeValidationDisabled("Java 9+ API only")
  public static <T> Stream<T> stream(java.util.Optional<T> optional) {
    return optional.isPresent() ? Stream.of(optional.get()) : Stream.empty();
  }
  /**
   * If a value is present in {@code optional}, returns a stream containing only that element,
   * otherwise returns an empty stream.
   *
   * <p><b>Java 9 users:</b> use {@code optional.stream()} instead.
   */
  @Beta
  @InlineMe(replacement = "optional.stream()")
  // Inlining validation is disabled because the replacement, OptionalInt.stream(), is Java 9+.
  @InlineMeValidationDisabled("Java 9+ API only")
  public static IntStream stream(OptionalInt optional) {
    return optional.isPresent() ? IntStream.of(optional.getAsInt()) : IntStream.empty();
  }
  /**
   * If a value is present in {@code optional}, returns a stream containing only that element,
   * otherwise returns an empty stream.
   *
   * <p><b>Java 9 users:</b> use {@code optional.stream()} instead.
   */
  @Beta
  @InlineMe(replacement = "optional.stream()")
  // Inlining validation is disabled because the replacement, OptionalLong.stream(), is Java 9+.
  @InlineMeValidationDisabled("Java 9+ API only")
  public static LongStream stream(OptionalLong optional) {
    return optional.isPresent() ? LongStream.of(optional.getAsLong()) : LongStream.empty();
  }
  /**
   * If a value is present in {@code optional}, returns a stream containing only that element,
   * otherwise returns an empty stream.
   *
   * <p><b>Java 9 users:</b> use {@code optional.stream()} instead.
   */
  @Beta
  @InlineMe(replacement = "optional.stream()")
  // Inlining validation is disabled because the replacement, OptionalDouble.stream(), is Java 9+.
  @InlineMeValidationDisabled("Java 9+ API only")
  public static DoubleStream stream(OptionalDouble optional) {
    return optional.isPresent() ? DoubleStream.of(optional.getAsDouble()) : DoubleStream.empty();
  }
@SuppressWarnings("CatchingUnchecked") // sneaky checked exception
private static void closeAll(BaseStream<?, ?>[] toClose) {
// If one of the streams throws an exception, continue closing the others, then throw the
// exception later. If more than one stream throws an exception, the later ones are added to the
// first as suppressed exceptions. We don't catch Error on the grounds that it should be allowed
// to propagate immediately.
Exception exception = null;
for (BaseStream<?, ?> stream : toClose) {
try {
stream.close();
} catch (Exception e) { // sneaky checked exception
if (exception == null) {
exception = e;
} else {
exception.addSuppressed(e);
}
}
}
if (exception != null) {
// Normally this is a RuntimeException that doesn't need sneakyThrow.
// But theoretically we could see sneaky checked exception
sneakyThrow(exception);
}
}
/**
* Returns a {@link Stream} containing the elements of the first stream, followed by the elements
* of the second stream, and so on.
*
* <p>This is equivalent to {@code Stream.of(streams).flatMap(stream -> stream)}, but the returned
* stream may perform better.
*
* @see Stream#concat(Stream, Stream)
*/
@SuppressWarnings("unchecked") // could probably be avoided with a forwarding Spliterator
@SafeVarargs
public static <T extends @Nullable Object> Stream<T> concat(Stream<? extends T>... streams) {
// TODO(lowasser): consider an implementation that can support SUBSIZED
boolean isParallel = false;
int characteristics = Spliterator.ORDERED | Spliterator.SIZED | Spliterator.NONNULL;
long estimatedSize = 0L;
ImmutableList.Builder<Spliterator<? extends T>> splitrsBuilder =
new ImmutableList.Builder<>(streams.length);
for (Stream<? extends T> stream : streams) {
isParallel |= stream.isParallel();
Spliterator<? extends T> splitr = stream.spliterator();
splitrsBuilder.add(splitr);
characteristics &= splitr.characteristics();
estimatedSize = LongMath.saturatedAdd(estimatedSize, splitr.estimateSize());
}
return StreamSupport.stream(
CollectSpliterators.flatMap(
splitrsBuilder.build().spliterator(),
splitr -> (Spliterator<T>) splitr,
characteristics,
estimatedSize),
isParallel)
.onClose(() -> closeAll(streams));
}
/**
* Returns an {@link IntStream} containing the elements of the first stream, followed by the
* elements of the second stream, and so on.
*
* <p>This is equivalent to {@code Stream.of(streams).flatMapToInt(stream -> stream)}, but the
* returned stream may perform better.
*
* @see IntStream#concat(IntStream, IntStream)
*/
public static IntStream concat(IntStream... streams) {
boolean isParallel = false;
int characteristics = Spliterator.ORDERED | Spliterator.SIZED | Spliterator.NONNULL;
long estimatedSize = 0L;
ImmutableList.Builder<Spliterator.OfInt> splitrsBuilder =
new ImmutableList.Builder<>(streams.length);
for (IntStream stream : streams) {
isParallel |= stream.isParallel();
Spliterator.OfInt splitr = stream.spliterator();
splitrsBuilder.add(splitr);
characteristics &= splitr.characteristics();
estimatedSize = LongMath.saturatedAdd(estimatedSize, splitr.estimateSize());
}
return StreamSupport.intStream(
CollectSpliterators.flatMapToInt(
splitrsBuilder.build().spliterator(),
splitr -> splitr,
characteristics,
estimatedSize),
isParallel)
.onClose(() -> closeAll(streams));
}
/**
* Returns a {@link LongStream} containing the elements of the first stream, followed by the
* elements of the second stream, and so on.
*
* <p>This is equivalent to {@code Stream.of(streams).flatMapToLong(stream -> stream)}, but the
* returned stream may perform better.
*
* @see LongStream#concat(LongStream, LongStream)
*/
public static LongStream concat(LongStream... streams) {
boolean isParallel = false;
int characteristics = Spliterator.ORDERED | Spliterator.SIZED | Spliterator.NONNULL;
long estimatedSize = 0L;
ImmutableList.Builder<Spliterator.OfLong> splitrsBuilder =
new ImmutableList.Builder<>(streams.length);
for (LongStream stream : streams) {
isParallel |= stream.isParallel();
Spliterator.OfLong splitr = stream.spliterator();
splitrsBuilder.add(splitr);
characteristics &= splitr.characteristics();
estimatedSize = LongMath.saturatedAdd(estimatedSize, splitr.estimateSize());
}
return StreamSupport.longStream(
CollectSpliterators.flatMapToLong(
splitrsBuilder.build().spliterator(),
splitr -> splitr,
characteristics,
estimatedSize),
isParallel)
.onClose(() -> closeAll(streams));
}
/**
* Returns a {@link DoubleStream} containing the elements of the first stream, followed by the
* elements of the second stream, and so on.
*
* <p>This is equivalent to {@code Stream.of(streams).flatMapToDouble(stream -> stream)}, but the
* returned stream may perform better.
*
* @see DoubleStream#concat(DoubleStream, DoubleStream)
*/
public static DoubleStream concat(DoubleStream... streams) {
boolean isParallel = false;
int characteristics = Spliterator.ORDERED | Spliterator.SIZED | Spliterator.NONNULL;
long estimatedSize = 0L;
ImmutableList.Builder<Spliterator.OfDouble> splitrsBuilder =
new ImmutableList.Builder<>(streams.length);
for (DoubleStream stream : streams) {
isParallel |= stream.isParallel();
Spliterator.OfDouble splitr = stream.spliterator();
splitrsBuilder.add(splitr);
characteristics &= splitr.characteristics();
estimatedSize = LongMath.saturatedAdd(estimatedSize, splitr.estimateSize());
}
return StreamSupport.doubleStream(
CollectSpliterators.flatMapToDouble(
splitrsBuilder.build().spliterator(),
splitr -> splitr,
characteristics,
estimatedSize),
isParallel)
.onClose(() -> closeAll(streams));
}
  /**
   * Returns a stream in which each element is the result of passing the corresponding element of
   * each of {@code streamA} and {@code streamB} to {@code function}.
   *
   * <p>For example:
   *
   * {@snippet :
   * Streams.zip(
   *     Stream.of("foo1", "foo2", "foo3"),
   *     Stream.of("bar1", "bar2"),
   *     (arg1, arg2) -> arg1 + ":" + arg2)
   * }
   *
   * <p>will return {@code Stream.of("foo1:bar1", "foo2:bar2")}.
   *
   * <p>The resulting stream will only be as long as the shorter of the two input streams; if one
   * stream is longer, its extra elements will be ignored.
   *
   * <p>Note that if you are calling {@link Stream#forEach} on the resulting stream, you might want
   * to consider using {@link #forEachPair} instead of this method.
   *
   * <p><b>Performance note:</b> The resulting stream is not <a
   * href="http://gee.cs.oswego.edu/dl/html/StreamParallelGuidance.html">efficiently splittable</a>.
   * This may harm parallel performance.
   */
  @Beta
  public static <A extends @Nullable Object, B extends @Nullable Object, R extends @Nullable Object>
      Stream<R> zip(
          Stream<A> streamA, Stream<B> streamB, BiFunction<? super A, ? super B, R> function) {
    checkNotNull(streamA);
    checkNotNull(streamB);
    checkNotNull(function);
    // Parallelism must be captured before spliterator() consumes the streams.
    boolean isParallel = streamA.isParallel() || streamB.isParallel(); // same as Stream.concat
    Spliterator<A> splitrA = streamA.spliterator();
    Spliterator<B> splitrB = streamB.spliterator();
    // The zipped stream can only promise what both inputs promise, and only SIZED and ORDERED
    // survive pairing (e.g. DISTINCT inputs say nothing about the distinctness of the pairs).
    int characteristics =
        splitrA.characteristics()
            & splitrB.characteristics()
            & (Spliterator.SIZED | Spliterator.ORDERED);
    Iterator<A> itrA = Spliterators.iterator(splitrA);
    Iterator<B> itrB = Spliterators.iterator(splitrB);
    return StreamSupport.stream(
            new AbstractSpliterator<R>(
                // The result is only as long as the shorter input.
                min(splitrA.estimateSize(), splitrB.estimateSize()), characteristics) {
              @Override
              public boolean tryAdvance(Consumer<? super R> action) {
                if (itrA.hasNext() && itrB.hasNext()) {
                  action.accept(function.apply(itrA.next(), itrB.next()));
                  return true;
                }
                return false;
              }
            },
            isParallel)
        // Closing the zipped stream closes both inputs.
        .onClose(streamA::close)
        .onClose(streamB::close);
  }
/**
* Invokes {@code consumer} once for each pair of <i>corresponding</i> elements in {@code streamA}
* and {@code streamB}. If one stream is longer than the other, the extra elements are silently
* ignored. Elements passed to the consumer are guaranteed to come from the same position in their
* respective source streams. For example:
*
* {@snippet :
* Streams.forEachPair(
* Stream.of("foo1", "foo2", "foo3"),
* Stream.of("bar1", "bar2"),
* (arg1, arg2) -> System.out.println(arg1 + ":" + arg2)
* }
*
* <p>will print:
*
* {@snippet :
* foo1:bar1
* foo2:bar2
* }
*
* <p><b>Warning:</b> If either supplied stream is a parallel stream, the same correspondence
* between elements will be made, but the order in which those pairs of elements are passed to the
* consumer is <i>not</i> defined.
*
* <p>Note that many usages of this method can be replaced with simpler calls to {@link #zip}.
* This method behaves equivalently to {@linkplain #zip zipping} the stream elements into
* temporary pair objects and then using {@link Stream#forEach} on that stream.
*
* @since 33.4.0 (but since 22.0 in the JRE flavor)
*/
@Beta
public static <A extends @Nullable Object, B extends @Nullable Object> void forEachPair(
Stream<A> streamA, Stream<B> streamB, BiConsumer<? super A, ? super B> consumer) {
checkNotNull(consumer);
if (streamA.isParallel() || streamB.isParallel()) {
zip(streamA, streamB, TemporaryPair::new).forEach(pair -> consumer.accept(pair.a, pair.b));
} else {
Iterator<A> iterA = streamA.iterator();
Iterator<B> iterB = streamB.iterator();
while (iterA.hasNext() && iterB.hasNext()) {
consumer.accept(iterA.next(), iterB.next());
}
}
}
  // Use this carefully - it doesn't implement value semantics
  // Short-lived carrier used only by forEachPair's parallel path; forEachPair reads the `a` and
  // `b` fields directly, so their names are part of this class's in-file contract.
  private static final class TemporaryPair<A extends @Nullable Object, B extends @Nullable Object> {
    @ParametricNullness final A a;
    @ParametricNullness final B b;

    TemporaryPair(@ParametricNullness A a, @ParametricNullness B b) {
      this.a = a;
      this.b = b;
    }
  }
  /**
   * Returns a stream consisting of the results of applying the given function to the elements of
   * {@code stream} and their indices in the stream. For example,
   *
   * {@snippet :
   * mapWithIndex(
   *     Stream.of("a", "b", "c"),
   *     (e, index) -> index + ":" + e)
   * }
   *
   * <p>would return {@code Stream.of("0:a", "1:b", "2:c")}.
   *
   * <p>The resulting stream is <a
   * href="http://gee.cs.oswego.edu/dl/html/StreamParallelGuidance.html">efficiently splittable</a>
   * if and only if {@code stream} was efficiently splittable and its underlying spliterator
   * reported {@link Spliterator#SUBSIZED}. This is generally the case if the underlying stream
   * comes from a data structure supporting efficient indexed random access, typically an array or
   * list.
   *
   * <p>The order of the resulting stream is defined if and only if the order of the original stream
   * was defined.
   */
  public static <T extends @Nullable Object, R extends @Nullable Object> Stream<R> mapWithIndex(
      Stream<T> stream, FunctionWithIndex<? super T, ? extends R> function) {
    checkNotNull(stream);
    checkNotNull(function);
    boolean isParallel = stream.isParallel();
    Spliterator<T> fromSpliterator = stream.spliterator();
    if (!fromSpliterator.hasCharacteristics(Spliterator.SUBSIZED)) {
      // Without SUBSIZED splits we can't compute the index of a split's first element, so fall
      // back to a non-splittable spliterator that simply counts elements as it advances.
      Iterator<T> fromIterator = Spliterators.iterator(fromSpliterator);
      return StreamSupport.stream(
              new AbstractSpliterator<R>(
                  fromSpliterator.estimateSize(),
                  fromSpliterator.characteristics() & (Spliterator.ORDERED | Spliterator.SIZED)) {
                long index = 0;

                @Override
                public boolean tryAdvance(Consumer<? super R> action) {
                  if (fromIterator.hasNext()) {
                    action.accept(function.apply(fromIterator.next(), index++));
                    return true;
                  }
                  return false;
                }
              },
              isParallel)
          .onClose(stream::close);
    }
    // SUBSIZED case: every split knows its exact size, so each split can carry the index of its
    // first element (see MapWithIndexSpliterator.trySplit) and the result stays splittable.
    final class Splitr extends MapWithIndexSpliterator<Spliterator<T>, R, Splitr>
        implements Consumer<T> {
      // Side-channel: the source spliterator's tryAdvance writes the element here via accept().
      @Nullable T holder;

      Splitr(Spliterator<T> splitr, long index) {
        super(splitr, index);
      }

      @Override
      public void accept(@ParametricNullness T t) {
        this.holder = t;
      }

      @Override
      public boolean tryAdvance(Consumer<? super R> action) {
        if (fromSpliterator.tryAdvance(this)) {
          try {
            // The cast is safe because tryAdvance puts a T into `holder`.
            action.accept(function.apply(uncheckedCastNullableTToT(holder), index++));
            return true;
          } finally {
            // Clear the holder so we don't pin a reference to the element longer than needed.
            holder = null;
          }
        }
        return false;
      }

      @Override
      Splitr createSplit(Spliterator<T> from, long i) {
        return new Splitr(from, i);
      }
    }
    return StreamSupport.stream(new Splitr(fromSpliterator, 0), isParallel).onClose(stream::close);
  }
  /**
   * Returns a stream consisting of the results of applying the given function to the elements of
   * {@code stream} and their indexes in the stream. For example,
   *
   * {@snippet :
   * mapWithIndex(
   *     IntStream.of(10, 11, 12),
   *     (e, index) -> index + ":" + e)
   * }
   *
   * <p>...would return {@code Stream.of("0:10", "1:11", "2:12")}.
   *
   * <p>The resulting stream is <a
   * href="http://gee.cs.oswego.edu/dl/html/StreamParallelGuidance.html">efficiently splittable</a>
   * if and only if {@code stream} was efficiently splittable and its underlying spliterator
   * reported {@link Spliterator#SUBSIZED}. This is generally the case if the underlying stream
   * comes from a data structure supporting efficient indexed random access, typically an array or
   * list.
   *
   * <p>The order of the resulting stream is defined if and only if the order of the original stream
   * was defined.
   */
  @SuppressWarnings("AndroidJdkLibsChecker") // b/229998664
  public static <R extends @Nullable Object> Stream<R> mapWithIndex(
      IntStream stream, IntFunctionWithIndex<R> function) {
    checkNotNull(stream);
    checkNotNull(function);
    boolean isParallel = stream.isParallel();
    Spliterator.OfInt fromSpliterator = stream.spliterator();
    if (!fromSpliterator.hasCharacteristics(Spliterator.SUBSIZED)) {
      // Without SUBSIZED splits we can't compute the index of a split's first element, so fall
      // back to a non-splittable spliterator that counts elements as it advances.
      PrimitiveIterator.OfInt fromIterator = Spliterators.iterator(fromSpliterator);
      return StreamSupport.stream(
              new AbstractSpliterator<R>(
                  fromSpliterator.estimateSize(),
                  fromSpliterator.characteristics() & (Spliterator.ORDERED | Spliterator.SIZED)) {
                long index = 0;

                @Override
                public boolean tryAdvance(Consumer<? super R> action) {
                  if (fromIterator.hasNext()) {
                    action.accept(function.apply(fromIterator.nextInt(), index++));
                    return true;
                  }
                  return false;
                }
              },
              isParallel)
          .onClose(stream::close);
    }
    // SUBSIZED case: splits know their exact sizes, so each split carries the index of its first
    // element and the result stays efficiently splittable.
    final class Splitr extends MapWithIndexSpliterator<Spliterator.OfInt, R, Splitr>
        implements IntConsumer {
      // Side-channel: the source spliterator's tryAdvance writes the advanced value here.
      int holder;

      Splitr(Spliterator.OfInt splitr, long index) {
        super(splitr, index);
      }

      @Override
      public void accept(int t) {
        this.holder = t;
      }

      @Override
      public boolean tryAdvance(Consumer<? super R> action) {
        if (fromSpliterator.tryAdvance(this)) {
          action.accept(function.apply(holder, index++));
          return true;
        }
        return false;
      }

      @Override
      Splitr createSplit(Spliterator.OfInt from, long i) {
        return new Splitr(from, i);
      }
    }
    return StreamSupport.stream(new Splitr(fromSpliterator, 0), isParallel).onClose(stream::close);
  }
  /**
   * Returns a stream consisting of the results of applying the given function to the elements of
   * {@code stream} and their indexes in the stream. For example,
   *
   * {@snippet :
   * mapWithIndex(
   *     LongStream.of(10, 11, 12),
   *     (e, index) -> index + ":" + e)
   * }
   *
   * <p>...would return {@code Stream.of("0:10", "1:11", "2:12")}.
   *
   * <p>The resulting stream is <a
   * href="http://gee.cs.oswego.edu/dl/html/StreamParallelGuidance.html">efficiently splittable</a>
   * if and only if {@code stream} was efficiently splittable and its underlying spliterator
   * reported {@link Spliterator#SUBSIZED}. This is generally the case if the underlying stream
   * comes from a data structure supporting efficient indexed random access, typically an array or
   * list.
   *
   * <p>The order of the resulting stream is defined if and only if the order of the original stream
   * was defined.
   */
  @SuppressWarnings("AndroidJdkLibsChecker") // b/229998664
  public static <R extends @Nullable Object> Stream<R> mapWithIndex(
      LongStream stream, LongFunctionWithIndex<R> function) {
    checkNotNull(stream);
    checkNotNull(function);
    boolean isParallel = stream.isParallel();
    Spliterator.OfLong fromSpliterator = stream.spliterator();
    if (!fromSpliterator.hasCharacteristics(Spliterator.SUBSIZED)) {
      // Without SUBSIZED splits we can't compute the index of a split's first element, so fall
      // back to a non-splittable spliterator that counts elements as it advances.
      PrimitiveIterator.OfLong fromIterator = Spliterators.iterator(fromSpliterator);
      return StreamSupport.stream(
              new AbstractSpliterator<R>(
                  fromSpliterator.estimateSize(),
                  fromSpliterator.characteristics() & (Spliterator.ORDERED | Spliterator.SIZED)) {
                long index = 0;

                @Override
                public boolean tryAdvance(Consumer<? super R> action) {
                  if (fromIterator.hasNext()) {
                    action.accept(function.apply(fromIterator.nextLong(), index++));
                    return true;
                  }
                  return false;
                }
              },
              isParallel)
          .onClose(stream::close);
    }
    // SUBSIZED case: splits know their exact sizes, so each split carries the index of its first
    // element and the result stays efficiently splittable.
    final class Splitr extends MapWithIndexSpliterator<Spliterator.OfLong, R, Splitr>
        implements LongConsumer {
      // Side-channel: the source spliterator's tryAdvance writes the advanced value here.
      long holder;

      Splitr(Spliterator.OfLong splitr, long index) {
        super(splitr, index);
      }

      @Override
      public void accept(long t) {
        this.holder = t;
      }

      @Override
      public boolean tryAdvance(Consumer<? super R> action) {
        if (fromSpliterator.tryAdvance(this)) {
          action.accept(function.apply(holder, index++));
          return true;
        }
        return false;
      }

      @Override
      Splitr createSplit(Spliterator.OfLong from, long i) {
        return new Splitr(from, i);
      }
    }
    return StreamSupport.stream(new Splitr(fromSpliterator, 0), isParallel).onClose(stream::close);
  }
  /**
   * Returns a stream consisting of the results of applying the given function to the elements of
   * {@code stream} and their indexes in the stream. For example,
   *
   * {@snippet :
   * mapWithIndex(
   *     DoubleStream.of(0.0, 1.0, 2.0),
   *     (e, index) -> index + ":" + e)
   * }
   *
   * <p>...would return {@code Stream.of("0:0.0", "1:1.0", "2:2.0")}.
   *
   * <p>The resulting stream is <a
   * href="http://gee.cs.oswego.edu/dl/html/StreamParallelGuidance.html">efficiently splittable</a>
   * if and only if {@code stream} was efficiently splittable and its underlying spliterator
   * reported {@link Spliterator#SUBSIZED}. This is generally the case if the underlying stream
   * comes from a data structure supporting efficient indexed random access, typically an array or
   * list.
   *
   * <p>The order of the resulting stream is defined if and only if the order of the original stream
   * was defined.
   */
  @SuppressWarnings("AndroidJdkLibsChecker") // b/229998664
  public static <R extends @Nullable Object> Stream<R> mapWithIndex(
      DoubleStream stream, DoubleFunctionWithIndex<R> function) {
    checkNotNull(stream);
    checkNotNull(function);
    boolean isParallel = stream.isParallel();
    Spliterator.OfDouble fromSpliterator = stream.spliterator();
    if (!fromSpliterator.hasCharacteristics(Spliterator.SUBSIZED)) {
      // Without SUBSIZED splits we can't compute the index of a split's first element, so fall
      // back to a non-splittable spliterator that counts elements as it advances.
      PrimitiveIterator.OfDouble fromIterator = Spliterators.iterator(fromSpliterator);
      return StreamSupport.stream(
              new AbstractSpliterator<R>(
                  fromSpliterator.estimateSize(),
                  fromSpliterator.characteristics() & (Spliterator.ORDERED | Spliterator.SIZED)) {
                long index = 0;

                @Override
                public boolean tryAdvance(Consumer<? super R> action) {
                  if (fromIterator.hasNext()) {
                    action.accept(function.apply(fromIterator.nextDouble(), index++));
                    return true;
                  }
                  return false;
                }
              },
              isParallel)
          .onClose(stream::close);
    }
    // SUBSIZED case: splits know their exact sizes, so each split carries the index of its first
    // element and the result stays efficiently splittable.
    final class Splitr extends MapWithIndexSpliterator<Spliterator.OfDouble, R, Splitr>
        implements DoubleConsumer {
      // Side-channel: the source spliterator's tryAdvance writes the advanced value here.
      double holder;

      Splitr(Spliterator.OfDouble splitr, long index) {
        super(splitr, index);
      }

      @Override
      public void accept(double t) {
        this.holder = t;
      }

      @Override
      public boolean tryAdvance(Consumer<? super R> action) {
        if (fromSpliterator.tryAdvance(this)) {
          action.accept(function.apply(holder, index++));
          return true;
        }
        return false;
      }

      @Override
      Splitr createSplit(Spliterator.OfDouble from, long i) {
        return new Splitr(from, i);
      }
    }
    return StreamSupport.stream(new Splitr(fromSpliterator, 0), isParallel).onClose(stream::close);
  }
  /**
   * An analogue of {@link java.util.function.Function} also accepting an index.
   *
   * <p>This interface is only intended for use by callers of {@link #mapWithIndex(Stream,
   * FunctionWithIndex)}.
   *
   * @since 33.4.0 (but since 21.0 in the JRE flavor)
   */
  public interface FunctionWithIndex<T extends @Nullable Object, R extends @Nullable Object> {
    /** Applies this function to the given argument and its zero-based index within a stream. */
    @ParametricNullness
    R apply(@ParametricNullness T from, long index);
  }
  /*
   * @IgnoreJRERequirement should be redundant with the one on Streams itself, but it's necessary as
   * of Animal Sniffer 1.24. Maybe Animal Sniffer processes this nested class before it processes
   * Streams and thus hasn't had a chance to see Streams's annotation?
   */
  @IgnoreJRERequirement
  // Base class shared by the mapWithIndex implementations: wraps a source spliterator of type F
  // and tracks the stream index of the next element this spliterator will emit.
  private abstract static class MapWithIndexSpliterator<
          F extends Spliterator<?>,
          R extends @Nullable Object,
          S extends MapWithIndexSpliterator<F, R, S>>
      implements Spliterator<R> {
    final F fromSpliterator;
    // Index of the next element to be produced by this spliterator.
    long index;

    MapWithIndexSpliterator(F fromSpliterator, long index) {
      this.fromSpliterator = fromSpliterator;
      this.index = index;
    }

    // Factory for the concrete subclass; used by trySplit to hand off the prefix.
    abstract S createSplit(F from, long i);

    @Override
    public @Nullable S trySplit() {
      Spliterator<?> splitOrNull = fromSpliterator.trySplit();
      if (splitOrNull == null) {
        return null;
      }
      @SuppressWarnings("unchecked")
      F split = (F) splitOrNull;
      // The prefix split starts at the current index; this spliterator's index jumps past it.
      // (Callers only use this on SUBSIZED sources, so the split's exact size is known.)
      S result = createSplit(split, index);
      this.index += split.getExactSizeIfKnown();
      return result;
    }

    @Override
    public long estimateSize() {
      return fromSpliterator.estimateSize();
    }

    @Override
    public int characteristics() {
      // Only these characteristics are meaningful for the index-decorated view.
      return fromSpliterator.characteristics()
          & (Spliterator.ORDERED | Spliterator.SIZED | Spliterator.SUBSIZED);
    }
  }
  /**
   * An analogue of {@link java.util.function.IntFunction} also accepting an index.
   *
   * <p>This interface is only intended for use by callers of {@link #mapWithIndex(IntStream,
   * IntFunctionWithIndex)}.
   *
   * @since 33.4.0 (but since 21.0 in the JRE flavor)
   */
  public interface IntFunctionWithIndex<R extends @Nullable Object> {
    /** Applies this function to the given argument and its zero-based index within a stream. */
    @ParametricNullness
    R apply(int from, long index);
  }
  /**
   * An analogue of {@link java.util.function.LongFunction} also accepting an index.
   *
   * <p>This interface is only intended for use by callers of {@link #mapWithIndex(LongStream,
   * LongFunctionWithIndex)}.
   *
   * @since 33.4.0 (but since 21.0 in the JRE flavor)
   */
  public interface LongFunctionWithIndex<R extends @Nullable Object> {
    /** Applies this function to the given argument and its zero-based index within a stream. */
    @ParametricNullness
    R apply(long from, long index);
  }
  /**
   * An analogue of {@link java.util.function.DoubleFunction} also accepting an index.
   *
   * <p>This interface is only intended for use by callers of {@link #mapWithIndex(DoubleStream,
   * DoubleFunctionWithIndex)}.
   *
   * @since 33.4.0 (but since 21.0 in the JRE flavor)
   */
  public interface DoubleFunctionWithIndex<R extends @Nullable Object> {
    /** Applies this function to the given argument and its zero-based index within a stream. */
    @ParametricNullness
    R apply(double from, long index);
  }
  /**
   * Returns the last element of the specified stream, or {@link java.util.Optional#empty} if the
   * stream is empty.
   *
   * <p>Equivalent to {@code stream.reduce((a, b) -> b)}, but may perform significantly better. This
   * method's runtime will be between O(log n) and O(n), performing better on <a
   * href="http://gee.cs.oswego.edu/dl/html/StreamParallelGuidance.html">efficiently splittable</a>
   * streams.
   *
   * <p>If the stream has nondeterministic order, this has equivalent semantics to {@link
   * Stream#findAny} (which you might as well use).
   *
   * @see Stream#findFirst()
   * @throws NullPointerException if the last element of the stream is null
   */
  /*
   * By declaring <T> instead of <T extends @Nullable Object>, we declare this method as requiring a
   * stream whose elements are non-null. However, the method goes out of its way to still handle
   * nulls in the stream. This means that the method can safely be used with a stream that contains
   * nulls as long as the *last* element is *not* null.
   *
   * (To "go out of its way," the method tracks a `set` bit so that it can distinguish "the final
   * split has a last element of null, so throw NPE" from "the final split was empty, so look for an
   * element in the prior one.")
   */
  public static <T> java.util.Optional<T> findLast(Stream<T> stream) {
    // Records the last value passed to set(); `set` distinguishes "saw a (possibly null) element"
    // from "saw nothing at all".
    final class OptionalState {
      boolean set = false;
      @Nullable T value = null;

      void set(T value) {
        this.set = true;
        this.value = value;
      }

      T get() {
        /*
         * requireNonNull is safe because we call get() only if we've previously called set().
         *
         * (For further discussion of nullness, see the comment above the method.)
         */
        return requireNonNull(value);
      }
    }
    OptionalState state = new OptionalState();

    // Stack of pending splits; the last (top) entry covers the latest-positioned elements.
    Deque<Spliterator<T>> splits = new ArrayDeque<>();
    splits.addLast(stream.spliterator());

    while (!splits.isEmpty()) {
      Spliterator<T> spliterator = splits.removeLast();

      if (spliterator.getExactSizeIfKnown() == 0) {
        continue; // drop this split
      }

      // Many spliterators will have trySplits that are SUBSIZED even if they are not themselves
      // SUBSIZED.
      if (spliterator.hasCharacteristics(Spliterator.SUBSIZED)) {
        // we can drill down to exactly the smallest nonempty spliterator
        while (true) {
          Spliterator<T> prefix = spliterator.trySplit();
          if (prefix == null || prefix.getExactSizeIfKnown() == 0) {
            break;
          } else if (spliterator.getExactSizeIfKnown() == 0) {
            // The split moved everything into the prefix; continue drilling into it.
            spliterator = prefix;
            break;
          }
        }

        // spliterator is known to be nonempty now
        spliterator.forEachRemaining(state::set);
        return java.util.Optional.of(state.get());
      }

      Spliterator<T> prefix = spliterator.trySplit();
      if (prefix == null || prefix.getExactSizeIfKnown() == 0) {
        // we can't split this any further
        spliterator.forEachRemaining(state::set);
        if (state.set) {
          return java.util.Optional.of(state.get());
        }
        // fall back to the last split
        continue;
      }
      // Re-queue prefix below suffix so the suffix (later elements) is examined first.
      splits.addLast(prefix);
      splits.addLast(spliterator);
    }
    return java.util.Optional.empty();
  }
/**
* Returns the last element of the specified stream, or {@link OptionalInt#empty} if the stream is
* empty.
*
* <p>Equivalent to {@code stream.reduce((a, b) -> b)}, but may perform significantly better. This
* method's runtime will be between O(log n) and O(n), performing better on <a
* href="http://gee.cs.oswego.edu/dl/html/StreamParallelGuidance.html">efficiently splittable</a>
* streams.
*
* @see IntStream#findFirst()
* @throws NullPointerException if the last element of the stream is null
*/
public static OptionalInt findLast(IntStream stream) {
// findLast(Stream) does some allocation, so we might as well box some more
java.util.Optional<Integer> boxedLast = findLast(stream.boxed());
return boxedLast.map(OptionalInt::of).orElse(OptionalInt.empty());
}
/**
* Returns the last element of the specified stream, or {@link OptionalLong#empty} if the stream
* is empty.
*
* <p>Equivalent to {@code stream.reduce((a, b) -> b)}, but may perform significantly better. This
* method's runtime will be between O(log n) and O(n), performing better on <a
* href="http://gee.cs.oswego.edu/dl/html/StreamParallelGuidance.html">efficiently splittable</a>
* streams.
*
* @see LongStream#findFirst()
* @throws NullPointerException if the last element of the stream is null
*/
public static OptionalLong findLast(LongStream stream) {
// findLast(Stream) does some allocation, so we might as well box some more
java.util.Optional<Long> boxedLast = findLast(stream.boxed());
return boxedLast.map(OptionalLong::of).orElse(OptionalLong.empty());
}
/**
* Returns the last element of the specified stream, or {@link OptionalDouble#empty} if the stream
* is empty.
*
* <p>Equivalent to {@code stream.reduce((a, b) -> b)}, but may perform significantly better. This
* method's runtime will be between O(log n) and O(n), performing better on <a
* href="http://gee.cs.oswego.edu/dl/html/StreamParallelGuidance.html">efficiently splittable</a>
* streams.
*
* @see DoubleStream#findFirst()
* @throws NullPointerException if the last element of the stream is null
*/
public static OptionalDouble findLast(DoubleStream stream) {
// findLast(Stream) does some allocation, so we might as well box some more
java.util.Optional<Double> boxedLast = findLast(stream.boxed());
return boxedLast.map(OptionalDouble::of).orElse(OptionalDouble.empty());
}
  /** Static utility holder; never instantiated. */
  private Streams() {}
}
|
apache/juneau | 36,143 | juneau-utest/src/test/java/org/apache/juneau/httppart/HttpPartSchema_Body_Test.java | // ***************************************************************************************************************************
// * Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file *
// * distributed with this work for additional information regarding copyright ownership. The ASF licenses this file *
// * to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance *
// * with the License. You may obtain a copy of the License at *
// * *
// * http://www.apache.org/licenses/LICENSE-2.0 *
// * *
// * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an *
// * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the *
// * specific language governing permissions and limitations under the License. *
// ***************************************************************************************************************************
package org.apache.juneau.httppart;
import static org.apache.juneau.TestUtils.*;
import static org.junit.jupiter.api.Assertions.*;
import org.apache.juneau.*;
import org.apache.juneau.annotation.*;
import org.apache.juneau.http.annotation.*;
import org.apache.juneau.reflect.*;
import org.junit.jupiter.api.*;
class HttpPartSchema_Body_Test extends TestBase {
//-----------------------------------------------------------------------------------------------------------------
// Basic test
//-----------------------------------------------------------------------------------------------------------------
@Test void a01_basic() {
assertDoesNotThrow(()->HttpPartSchema.create().build());
}
//-----------------------------------------------------------------------------------------------------------------
// @Body
//-----------------------------------------------------------------------------------------------------------------
    // Fixture: class-level @Content/@Schema with r=true (required); consumed by a02/a04 to verify
    // that schema settings declared on a type are picked up by applyAll(Content.class, ...).
    @Content
    @Schema(
        d={"b1","b2"},
        $ref="c1",
        r=true
    )
    public static class A02 {}
@Test void a02_basic_onClass() {
var s = HttpPartSchema.create().applyAll(Content.class, A02.class).noValidate().build();
assertTrue(s.isRequired());
}
    // Fixture: the @Content/@Schema metadata lives on a method parameter (not a class);
    // consumed by a03 to verify parameter-level annotation handling.
    public static class A03 {
        public void a( // NOSONAR
            @Content
            @Schema(
                d={"b1","b2"},
                $ref="c1",
                r=true
            )
            String x
        ) {
        }
    }
@Test void a03_basic_onParameter() throws Exception {
var mpi = MethodInfo.of(A03.class.getMethod("a", String.class)).getParam(0);
var s = HttpPartSchema.create().applyAll(Content.class, mpi).noValidate().build();
assertTrue(s.isRequired());
}
    // Fixture: annotations on a parameter whose type (A02) also carries class-level annotations;
    // consumed by a04 to verify that both sources are applied.
    public static class A04 {
        public void a( // NOSONAR
            @Content
            @Schema(
                d={"b3","b3"},
                $ref="c3",
                r=true
            )
            A02 x
        ) {
        }
    }
@Test void a04_basic_onParameterAndClass() throws Exception {
var mpi = MethodInfo.of(A04.class.getMethod("a", A02.class)).getParam(0);
var s = HttpPartSchema.create().applyAll(Content.class, mpi).noValidate().build();
assertTrue(s.isRequired());
}
    // Fixture: exercises every @Schema attribute plus three levels of nested items
    // (@Items -> @SubItems -> free-form JSON items); consumed by a05 which asserts the
    // values survive into the built HttpPartSchema at each nesting level.
    @Content
    @Schema(
        t="number",
        f="int32",
        max="1",
        min="2",
        mo="3",
        p="4",
        maxl=1,
        minl=2,
        maxi=3,
        mini=4,
        maxp=5,
        minp=6,
        emax=true,
        emin=true,
        ui=true,
        df={"c1","c2"},
        e="e1,e2",
        items=@Items(
            t="integer",
            f="int64",
            cf="ssv",
            max="5",
            min="6",
            mo="7",
            p="8",
            maxl=5,
            minl=6,
            maxi=7,
            mini=8,
            emax=false,
            emin=false,
            ui=false,
            df={"c3","c4"},
            e="e3,e4",
            items=@SubItems(
                t="string",
                f="float",
                cf="tsv",
                max="9",
                min="10",
                mo="11",
                p="12",
                maxl=9,
                minl=10,
                maxi=11,
                mini=12,
                emax=true,
                emin=true,
                ui=true,
                df={"c5","c6"},
                e="e5,e6",
                // Deepest level is free-form JSON since @SubItems can't nest further annotations.
                items={
                    "type:'array',",
                    "format:'double',",
                    "collectionFormat:'pipes',",
                    "maximum:'13',",
                    "minimum:'14',",
                    "multipleOf:'15',",
                    "pattern:'16',",
                    "maxLength:13,",
                    "minLength:14,",
                    "maxItems:15,",
                    "minItems:16,",
                    "exclusiveMaximum:false,",
                    "exclusiveMinimum:false,",
                    "uniqueItems:false,",
                    "default:'c7\\nc8',",
                    "enum:['e7','e8']",
                }
            )
        )
    )
    public static class A05 {}
@Test void a05_basic_nestedItems_onClass() {
var s = HttpPartSchema.create().applyAll(Content.class, A05.class).noValidate().build();
assertBean(
s,
"type,format,maximum,minimum,multipleOf,pattern,maxLength,minLength,maxItems,minItems,maxProperties,minProperties,exclusiveMaximum,exclusiveMinimum,uniqueItems,enum,default",
"NUMBER,INT32,1,2,3,4,1,2,3,4,5,6,true,true,true,[e1,e2],c1\nc2"
);
var items = s.getItems();
assertBean(
items,
"type,format,collectionFormat,maximum,minimum,multipleOf,pattern,maxLength,minLength,maxItems,minItems,exclusiveMaximum,exclusiveMinimum,uniqueItems,enum,default",
"INTEGER,INT64,SSV,5,6,7,8,5,6,7,8,false,false,false,[e3,e4],c3\nc4"
);
items = items.getItems();
assertBean(
items,
"type,format,collectionFormat,maximum,minimum,multipleOf,pattern,maxLength,minLength,maxItems,minItems,exclusiveMaximum,exclusiveMinimum,uniqueItems,enum,default",
"STRING,FLOAT,TSV,9,10,11,12,9,10,11,12,true,true,true,[e5,e6],c5\nc6"
);
items = items.getItems();
assertBean(
items,
"type,format,collectionFormat,maximum,minimum,multipleOf,pattern,maxLength,minLength,maxItems,minItems,exclusiveMaximum,exclusiveMinimum,uniqueItems,enum,default",
"ARRAY,DOUBLE,PIPES,13,14,15,16,13,14,15,16,false,false,false,[e7,e8],c7\nc8"
);
}
//-----------------------------------------------------------------------------------------------------------------
// String input validations.
//-----------------------------------------------------------------------------------------------------------------
    // Fixture: required=true only; consumed by b01a to verify null/empty inputs are rejected.
    @Content @Schema(required=true)
    public static class B01a {}
@Test void b01a_required() throws Exception {
var s = HttpPartSchema.create().applyAll(Content.class, B01a.class).build();
s.validateInput("x");
assertThrowsWithMessage(SchemaValidationException.class, "No value specified.", ()->s.validateInput(null));
assertThrowsWithMessage(SchemaValidationException.class, "Empty value not allowed.", ()->s.validateInput(""));
}
    // Fixture: pattern "x.*" with aev=true (allow empty value); consumed by b02a.
    @Content
    @Schema(p="x.*",aev=true)
    public static class B02a {}
@Test void b02a_pattern() throws Exception {
var s = HttpPartSchema.create().applyAll(Content.class, B02a.class).build();
s.validateInput("x");
s.validateInput("xx");
assertThrowsWithMessage(SchemaValidationException.class, "Value does not match expected pattern. Must match pattern: x.*", ()->s.validateInput(""));
assertThrowsWithMessage(SchemaValidationException.class, "Value does not match expected pattern. Must match pattern: x.*", ()->s.validateInput("y"));
}
    // Fixture: nested patterns at four item levels (w.*, x.*, y.*, z.*).
    // NOTE(review): no b02b test method is visible in this chunk; presumably the
    // corresponding validation test exists elsewhere in the file — confirm.
    @Content
    @Schema(
        items=@Items(
            p="w.*",
            items=@SubItems(
                p="x.*",
                items={
                    "pattern:'y.*',",
                    "items:{pattern:'z.*'}"
                }
            )
        )
    )
    public static class B02b {}
    // Fixture: length bounds minl=2/maxl=3; consumed by b03a.
    @Content
    @Schema(
        minl=2, maxl=3
    )
    public static class B03a {}
@Test void b03a_length() throws Exception {
var s = HttpPartSchema.create().applyAll(Content.class, B03a.class).build();
s.validateInput("12");
s.validateInput("123");
s.validateInput(null);
assertThrowsWithMessage(SchemaValidationException.class, "Minimum length of value not met.", ()->s.validateInput("1"));
assertThrowsWithMessage(SchemaValidationException.class, "Maximum length of value exceeded.", ()->s.validateInput("1234"));
}
    // Fixture: length bounds at four item levels ([2,3], [3,4], [4,5], [5,6]); consumed by b03b.
    @Content
    @Schema(
        items=@Items(
            minl=2, maxl=3,
            items=@SubItems(
                minl=3, maxl=4,
                items={
                    "minLength:4,maxLength:5,",
                    "items:{minLength:5,maxLength:6}"
                }
            )
        )
    )
    public static class B03b {}
@Test void b03b_length_items() throws Exception {
var s = HttpPartSchema.create().applyAll(Content.class, B03b.class).build();
s.getItems().validateInput("12");
s.getItems().getItems().validateInput("123");
s.getItems().getItems().getItems().validateInput("1234");
s.getItems().getItems().getItems().getItems().validateInput("12345");
s.getItems().validateInput("123");
s.getItems().getItems().validateInput("1234");
s.getItems().getItems().getItems().validateInput("12345");
s.getItems().getItems().getItems().getItems().validateInput("123456");
s.getItems().validateInput(null);
s.getItems().getItems().validateInput(null);
s.getItems().getItems().getItems().validateInput(null);
s.getItems().getItems().getItems().getItems().validateInput(null);
assertThrowsWithMessage(SchemaValidationException.class, "Minimum length of value not met.", ()->s.getItems().validateInput("1"));
assertThrowsWithMessage(SchemaValidationException.class, "Minimum length of value not met.", ()->s.getItems().getItems().validateInput("12"));
assertThrowsWithMessage(SchemaValidationException.class, "Minimum length of value not met.", ()->s.getItems().getItems().getItems().validateInput("123"));
assertThrowsWithMessage(SchemaValidationException.class, "Minimum length of value not met.", ()->s.getItems().getItems().getItems().getItems().validateInput("1234"));
assertThrowsWithMessage(SchemaValidationException.class, "Maximum length of value exceeded.", ()->s.getItems().validateInput("1234"));
assertThrowsWithMessage(SchemaValidationException.class, "Maximum length of value exceeded.", ()->s.getItems().getItems().validateInput("12345"));
assertThrowsWithMessage(SchemaValidationException.class, "Maximum length of value exceeded.", ()->s.getItems().getItems().getItems().validateInput("123456"));
assertThrowsWithMessage(SchemaValidationException.class, "Maximum length of value exceeded.", ()->s.getItems().getItems().getItems().getItems().validateInput("1234567"));
}
    // Fixture: comma-delimited enum "X,Y"; consumed by b04a.
    @Content
    @Schema(
        e="X,Y"
    )
    public static class B04a {}
@Test void b04a_enum() throws Exception {
var s = HttpPartSchema.create().applyAll(Content.class, B04a.class).build();
s.validateInput("X");
s.validateInput("Y");
s.validateInput(null);
assertThrowsWithMessage(SchemaValidationException.class, "Value does not match one of the expected values. Must be one of the following: X, Y", ()->s.validateInput("Z"));
}
@Content
@Schema(e=" X , Y ")
public static class B04b {}

/** Enum validation with surrounding whitespace in the declaration: values are trimmed before matching. */
@Test void b04b_enum() throws Exception {
	var schema = HttpPartSchema.create().applyAll(Content.class, B04b.class).build();
	for (var allowed : new String[]{"X", "Y", null})
		schema.validateInput(allowed);
	assertThrowsWithMessage(SchemaValidationException.class, "Value does not match one of the expected values. Must be one of the following: X, Y", ()->schema.validateInput("Z"));
}
@Content
@Schema(e="X,Y")
public static class B04c {}

/** Enum validation, comma-delimited declaration form: only X/Y (and null) are accepted. */
@Test void b04c_enum_json() throws Exception {
	var schema = HttpPartSchema.create().applyAll(Content.class, B04c.class).build();
	for (var allowed : new String[]{"X", "Y", null})
		schema.validateInput(allowed);
	assertThrowsWithMessage(SchemaValidationException.class, "Value does not match one of the expected values. Must be one of the following: X, Y", ()->schema.validateInput("Z"));
}
// Fixture: distinct single-value enums (W/X/Y/Z) at each of four nesting levels,
// declared via @Items, @SubItems, and free-form JSON sub-items.
@Content
@Schema(
	items=@Items(
		e="W",
		items=@SubItems(
			e="X",
			items={
				"enum:['Y'],",
				"items:{enum:['Z']}"
			}
		)
	)
)
public static class B04d {}

/** Verifies each nesting level enforces its own enum and reports its own expected-values message. */
@Test void b04d_enum_items() throws Exception {
	var s = HttpPartSchema.create().applyAll(Content.class, B04d.class).build();
	// The single allowed value passes at each level.
	s.getItems().validateInput("W");
	s.getItems().getItems().validateInput("X");
	s.getItems().getItems().getItems().validateInput("Y");
	s.getItems().getItems().getItems().getItems().validateInput("Z");
	// A non-member value fails at each level with that level's enum in the message.
	assertThrowsWithMessage(SchemaValidationException.class, "Value does not match one of the expected values.  Must be one of the following: W", ()->s.getItems().validateInput("V"));
	assertThrowsWithMessage(SchemaValidationException.class, "Value does not match one of the expected values.  Must be one of the following: X", ()->s.getItems().getItems().validateInput("V"));
	assertThrowsWithMessage(SchemaValidationException.class, "Value does not match one of the expected values.  Must be one of the following: Y", ()->s.getItems().getItems().getItems().validateInput("V"));
	assertThrowsWithMessage(SchemaValidationException.class, "Value does not match one of the expected values.  Must be one of the following: Z", ()->s.getItems().getItems().getItems().getItems().validateInput("V"));
}
//-----------------------------------------------------------------------------------------------------------------
// Numeric validations
//-----------------------------------------------------------------------------------------------------------------
@Content
@Schema(min="10", max="100")
public static class C01a {}

/** minimum/maximum on integers: both bounds are inclusive, and null passes. */
@Test void c01a_minmax_ints() throws Exception {
	var schema = HttpPartSchema.create().applyAll(Content.class, C01a.class).build();
	for (var ok : new Integer[]{10, 100, null})
		schema.validateOutput(ok, BeanContext.DEFAULT);
	assertThrowsWithMessage(SchemaValidationException.class, "Minimum value not met.", ()->schema.validateOutput(9, BeanContext.DEFAULT));
	assertThrowsWithMessage(SchemaValidationException.class, "Maximum value exceeded.", ()->schema.validateOutput(101, BeanContext.DEFAULT));
}
// Fixture: integer minimum/maximum at four nesting levels; each deeper level
// scales the allowed range up by a factor of 10.
@Content
@Schema(
	items=@Items(
		min="10", max="100",
		items=@SubItems(
			min="100", max="1000",
			items={
				"minimum:1000,maximum:10000,",
				"items:{minimum:10000,maximum:100000}"
			}
		)
	)
)
public static class C01b {}

/** Verifies inclusive integer bounds are enforced independently at each nesting level. */
@Test void c01b_minmax_ints_items() throws Exception {
	var s = HttpPartSchema.create().applyAll(Content.class, C01b.class).build();
	// Inclusive lower bound at each level.
	s.getItems().validateOutput(10, BeanContext.DEFAULT);
	s.getItems().getItems().validateOutput(100, BeanContext.DEFAULT);
	s.getItems().getItems().getItems().validateOutput(1000, BeanContext.DEFAULT);
	s.getItems().getItems().getItems().getItems().validateOutput(10000, BeanContext.DEFAULT);
	// Inclusive upper bound at each level.
	s.getItems().validateOutput(100, BeanContext.DEFAULT);
	s.getItems().getItems().validateOutput(1000, BeanContext.DEFAULT);
	s.getItems().getItems().getItems().validateOutput(10000, BeanContext.DEFAULT);
	s.getItems().getItems().getItems().getItems().validateOutput(100000, BeanContext.DEFAULT);
	// One below the minimum fails at each level.
	assertThrowsWithMessage(SchemaValidationException.class, "Minimum value not met.", ()->s.getItems().validateOutput(9, BeanContext.DEFAULT));
	assertThrowsWithMessage(SchemaValidationException.class, "Minimum value not met.", ()->s.getItems().getItems().validateOutput(99, BeanContext.DEFAULT));
	assertThrowsWithMessage(SchemaValidationException.class, "Minimum value not met.", ()->s.getItems().getItems().getItems().validateOutput(999, BeanContext.DEFAULT));
	assertThrowsWithMessage(SchemaValidationException.class, "Minimum value not met.", ()->s.getItems().getItems().getItems().getItems().validateOutput(9999, BeanContext.DEFAULT));
	// One above the maximum fails at each level.
	assertThrowsWithMessage(SchemaValidationException.class, "Maximum value exceeded.", ()->s.getItems().validateOutput(101, BeanContext.DEFAULT));
	assertThrowsWithMessage(SchemaValidationException.class, "Maximum value exceeded.", ()->s.getItems().getItems().validateOutput(1001, BeanContext.DEFAULT));
	assertThrowsWithMessage(SchemaValidationException.class, "Maximum value exceeded.", ()->s.getItems().getItems().getItems().validateOutput(10001, BeanContext.DEFAULT));
	assertThrowsWithMessage(SchemaValidationException.class, "Maximum value exceeded.", ()->s.getItems().getItems().getItems().getItems().validateOutput(100001, BeanContext.DEFAULT));
}
@Content
@Schema(min="10", max="100", emin=true, emax=true)
public static class C02a {}

/** Exclusive minimum/maximum on integers: the bound values themselves are rejected. */
@Test void c02a_minmax_exclusive() throws Exception {
	var schema = HttpPartSchema.create().applyAll(Content.class, C02a.class).build();
	for (var ok : new Integer[]{11, 99, null})
		schema.validateOutput(ok, BeanContext.DEFAULT);
	assertThrowsWithMessage(SchemaValidationException.class, "Minimum value not met.", ()->schema.validateOutput(10, BeanContext.DEFAULT));
	assertThrowsWithMessage(SchemaValidationException.class, "Maximum value exceeded.", ()->schema.validateOutput(100, BeanContext.DEFAULT));
}
// Fixture: exclusive integer bounds at four nesting levels (ranges scale by 10 per level).
@Content
@Schema(
	items=@Items(
		min="10", max="100", emin=true, emax=true,
		items=@SubItems(
			min="100", max="1000", emin=true, emax=true,
			items={
				"minimum:1000,maximum:10000,exclusiveMinimum:true,exclusiveMaximum:true,",
				"items:{minimum:10000,maximum:100000,exclusiveMinimum:true,exclusiveMaximum:true}"
			}
		)
	)
)
public static class C02b {}

/** Verifies exclusive integer bounds per nesting level: bound values fail, bound±1 pass. */
@Test void c02b_minmax_exclusive_items() throws Exception {
	var s = HttpPartSchema.create().applyAll(Content.class, C02b.class).build();
	// Just above the exclusive minimum passes.
	s.getItems().validateOutput(11, BeanContext.DEFAULT);
	s.getItems().getItems().validateOutput(101, BeanContext.DEFAULT);
	s.getItems().getItems().getItems().validateOutput(1001, BeanContext.DEFAULT);
	s.getItems().getItems().getItems().getItems().validateOutput(10001, BeanContext.DEFAULT);
	// Just below the exclusive maximum passes.
	s.getItems().validateOutput(99, BeanContext.DEFAULT);
	s.getItems().getItems().validateOutput(999, BeanContext.DEFAULT);
	s.getItems().getItems().getItems().validateOutput(9999, BeanContext.DEFAULT);
	s.getItems().getItems().getItems().getItems().validateOutput(99999, BeanContext.DEFAULT);
	// The minimum itself is excluded.
	assertThrowsWithMessage(SchemaValidationException.class, "Minimum value not met.", ()->s.getItems().validateOutput(10, BeanContext.DEFAULT));
	assertThrowsWithMessage(SchemaValidationException.class, "Minimum value not met.", ()->s.getItems().getItems().validateOutput(100, BeanContext.DEFAULT));
	assertThrowsWithMessage(SchemaValidationException.class, "Minimum value not met.", ()->s.getItems().getItems().getItems().validateOutput(1000, BeanContext.DEFAULT));
	assertThrowsWithMessage(SchemaValidationException.class, "Minimum value not met.", ()->s.getItems().getItems().getItems().getItems().validateOutput(10000, BeanContext.DEFAULT));
	// The maximum itself is excluded.
	assertThrowsWithMessage(SchemaValidationException.class, "Maximum value exceeded.", ()->s.getItems().validateOutput(100, BeanContext.DEFAULT));
	assertThrowsWithMessage(SchemaValidationException.class, "Maximum value exceeded.", ()->s.getItems().getItems().validateOutput(1000, BeanContext.DEFAULT));
	assertThrowsWithMessage(SchemaValidationException.class, "Maximum value exceeded.", ()->s.getItems().getItems().getItems().validateOutput(10000, BeanContext.DEFAULT));
	assertThrowsWithMessage(SchemaValidationException.class, "Maximum value exceeded.", ()->s.getItems().getItems().getItems().getItems().validateOutput(100000, BeanContext.DEFAULT));
}
@Content
@Schema(min="10.1", max="100.1")
public static class C03a {}

/** minimum/maximum on floats: both bounds are inclusive, and null passes. */
@Test void c03_minmax_floats() throws Exception {
	var schema = HttpPartSchema.create().applyAll(Content.class, C03a.class).build();
	for (var ok : new Float[]{10.1f, 100.1f, null})
		schema.validateOutput(ok, BeanContext.DEFAULT);
	assertThrowsWithMessage(SchemaValidationException.class, "Minimum value not met.", ()->schema.validateOutput(10f, BeanContext.DEFAULT));
	assertThrowsWithMessage(SchemaValidationException.class, "Maximum value exceeded.", ()->schema.validateOutput(100.2f, BeanContext.DEFAULT));
}
// Fixture: float minimum/maximum at four nesting levels (ranges scale by 10 per level).
@Content
@Schema(
	items=@Items(
		min="10.1", max="100.1",
		items=@SubItems(
			min="100.1", max="1000.1",
			items={
				"minimum:1000.1,maximum:10000.1,",
				"items:{minimum:10000.1,maximum:100000.1}"
			}
		)
	)
)
public static class C03b {}

/** Verifies inclusive float bounds are enforced independently at each nesting level. */
@Test void c03b_minmax_floats_items() throws Exception {
	var s = HttpPartSchema.create().applyAll(Content.class, C03b.class).build();
	// Inclusive lower bound at each level.
	s.getItems().validateOutput(10.1f, BeanContext.DEFAULT);
	s.getItems().getItems().validateOutput(100.1f, BeanContext.DEFAULT);
	s.getItems().getItems().getItems().validateOutput(1000.1f, BeanContext.DEFAULT);
	s.getItems().getItems().getItems().getItems().validateOutput(10000.1f, BeanContext.DEFAULT);
	// Inclusive upper bound at each level.
	s.getItems().validateOutput(100.1f, BeanContext.DEFAULT);
	s.getItems().getItems().validateOutput(1000.1f, BeanContext.DEFAULT);
	s.getItems().getItems().getItems().validateOutput(10000.1f, BeanContext.DEFAULT);
	s.getItems().getItems().getItems().getItems().validateOutput(100000.1f, BeanContext.DEFAULT);
	// Below the minimum fails at each level.
	assertThrowsWithMessage(SchemaValidationException.class, "Minimum value not met.", ()->s.getItems().validateOutput(10f, BeanContext.DEFAULT));
	assertThrowsWithMessage(SchemaValidationException.class, "Minimum value not met.", ()->s.getItems().getItems().validateOutput(100f, BeanContext.DEFAULT));
	assertThrowsWithMessage(SchemaValidationException.class, "Minimum value not met.", ()->s.getItems().getItems().getItems().validateOutput(1000f, BeanContext.DEFAULT));
	assertThrowsWithMessage(SchemaValidationException.class, "Minimum value not met.", ()->s.getItems().getItems().getItems().getItems().validateOutput(10000f, BeanContext.DEFAULT));
	// Above the maximum fails at each level.
	assertThrowsWithMessage(SchemaValidationException.class, "Maximum value exceeded.", ()->s.getItems().validateOutput(100.2f, BeanContext.DEFAULT));
	assertThrowsWithMessage(SchemaValidationException.class, "Maximum value exceeded.", ()->s.getItems().getItems().validateOutput(1000.2f, BeanContext.DEFAULT));
	assertThrowsWithMessage(SchemaValidationException.class, "Maximum value exceeded.", ()->s.getItems().getItems().getItems().validateOutput(10000.2f, BeanContext.DEFAULT));
	assertThrowsWithMessage(SchemaValidationException.class, "Maximum value exceeded.", ()->s.getItems().getItems().getItems().getItems().validateOutput(100000.2f, BeanContext.DEFAULT));
}
@Content
@Schema(min="10.1", max="100.1", emin=true, emax=true)
public static class C04a {}

/** Exclusive minimum/maximum on floats: the bound values themselves are rejected. */
@Test void c04a_minmax_floats_exclusive() throws Exception {
	var schema = HttpPartSchema.create().applyAll(Content.class, C04a.class).build();
	for (var ok : new Float[]{10.2f, 100f, null})
		schema.validateOutput(ok, BeanContext.DEFAULT);
	assertThrowsWithMessage(SchemaValidationException.class, "Minimum value not met.", ()->schema.validateOutput(10.1f, BeanContext.DEFAULT));
	assertThrowsWithMessage(SchemaValidationException.class, "Maximum value exceeded.", ()->schema.validateOutput(100.1f, BeanContext.DEFAULT));
}
// Fixture: exclusive float bounds at four nesting levels (ranges scale by 10 per level).
@Content
@Schema(
	items=@Items(
		min="10.1", max="100.1", emin=true, emax=true,
		items=@SubItems(
			min="100.1", max="1000.1", emin=true, emax=true,
			items={
				"minimum:1000.1,maximum:10000.1,exclusiveMinimum:true,exclusiveMaximum:true,",
				"items:{minimum:10000.1,maximum:100000.1,exclusiveMinimum:true,exclusiveMaximum:true}"
			}
		)
	)
)
public static class C04b {}

/** Verifies exclusive float bounds per nesting level: bound values fail, values just inside pass. */
@Test void c04b_minmax_floats_exclusive_items() throws Exception {
	var s = HttpPartSchema.create().applyAll(Content.class, C04b.class).build();
	// Just above the exclusive minimum passes.
	s.getItems().validateOutput(10.2f, BeanContext.DEFAULT);
	s.getItems().getItems().validateOutput(100.2f, BeanContext.DEFAULT);
	s.getItems().getItems().getItems().validateOutput(1000.2f, BeanContext.DEFAULT);
	s.getItems().getItems().getItems().getItems().validateOutput(10000.2f, BeanContext.DEFAULT);
	// Just below the exclusive maximum passes.
	s.getItems().validateOutput(100f, BeanContext.DEFAULT);
	s.getItems().getItems().validateOutput(1000f, BeanContext.DEFAULT);
	s.getItems().getItems().getItems().validateOutput(10000f, BeanContext.DEFAULT);
	s.getItems().getItems().getItems().getItems().validateOutput(100000f, BeanContext.DEFAULT);
	// The minimum itself is excluded.
	assertThrowsWithMessage(SchemaValidationException.class, "Minimum value not met.", ()->s.getItems().validateOutput(10.1f, BeanContext.DEFAULT));
	assertThrowsWithMessage(SchemaValidationException.class, "Minimum value not met.", ()->s.getItems().getItems().validateOutput(100.1f, BeanContext.DEFAULT));
	assertThrowsWithMessage(SchemaValidationException.class, "Minimum value not met.", ()->s.getItems().getItems().getItems().validateOutput(1000.1f, BeanContext.DEFAULT));
	assertThrowsWithMessage(SchemaValidationException.class, "Minimum value not met.", ()->s.getItems().getItems().getItems().getItems().validateOutput(10000.1f, BeanContext.DEFAULT));
	// The maximum itself is excluded.
	assertThrowsWithMessage(SchemaValidationException.class, "Maximum value exceeded.", ()->s.getItems().validateOutput(100.1f, BeanContext.DEFAULT));
	assertThrowsWithMessage(SchemaValidationException.class, "Maximum value exceeded.", ()->s.getItems().getItems().validateOutput(1000.1f, BeanContext.DEFAULT));
	assertThrowsWithMessage(SchemaValidationException.class, "Maximum value exceeded.", ()->s.getItems().getItems().getItems().validateOutput(10000.1f, BeanContext.DEFAULT));
	assertThrowsWithMessage(SchemaValidationException.class, "Maximum value exceeded.", ()->s.getItems().getItems().getItems().getItems().validateOutput(100000.1f, BeanContext.DEFAULT));
}
@Content
@Schema(mo="10")
public static class C05a {}

/** multipleOf on integers: zero, exact multiples (int or float form), and null pass. */
@Test void c05a_multipleOf() throws Exception {
	var schema = HttpPartSchema.create().applyAll(Content.class, C05a.class).build();
	for (var ok : new Object[]{0, 10, 20, 10f, 20f, null})
		schema.validateOutput(ok, BeanContext.DEFAULT);
	assertThrowsWithMessage(SchemaValidationException.class, "Multiple-of not met.", ()->schema.validateOutput(11, BeanContext.DEFAULT));
}
// Fixture: integer multipleOf at four nesting levels (factor scales by 10 per level).
@Content
@Schema(
	items=@Items(
		mo="10",
		items=@SubItems(
			mo="100",
			items={
				"multipleOf:1000,",
				"items:{multipleOf:10000}"
			}
		)
	)
)
public static class C05b {}

/** Verifies multipleOf per nesting level: 0, 1x and 2x the factor pass (int and float forms); factor+1 fails. */
@Test void c05b_multipleOf_items() throws Exception {
	var s = HttpPartSchema.create().applyAll(Content.class, C05b.class).build();
	// Zero is a multiple of anything, at every level.
	s.getItems().validateOutput(0, BeanContext.DEFAULT);
	s.getItems().getItems().validateOutput(0, BeanContext.DEFAULT);
	s.getItems().getItems().getItems().validateOutput(0, BeanContext.DEFAULT);
	s.getItems().getItems().getItems().getItems().validateOutput(0, BeanContext.DEFAULT);
	// Exactly one multiple at each level.
	s.getItems().validateOutput(10, BeanContext.DEFAULT);
	s.getItems().getItems().validateOutput(100, BeanContext.DEFAULT);
	s.getItems().getItems().getItems().validateOutput(1000, BeanContext.DEFAULT);
	s.getItems().getItems().getItems().getItems().validateOutput(10000, BeanContext.DEFAULT);
	// Two multiples at each level.
	s.getItems().validateOutput(20, BeanContext.DEFAULT);
	s.getItems().getItems().validateOutput(200, BeanContext.DEFAULT);
	s.getItems().getItems().getItems().validateOutput(2000, BeanContext.DEFAULT);
	s.getItems().getItems().getItems().getItems().validateOutput(20000, BeanContext.DEFAULT);
	// Same multiples expressed as floats also pass.
	s.getItems().validateOutput(10f, BeanContext.DEFAULT);
	s.getItems().getItems().validateOutput(100f, BeanContext.DEFAULT);
	s.getItems().getItems().getItems().validateOutput(1000f, BeanContext.DEFAULT);
	s.getItems().getItems().getItems().getItems().validateOutput(10000f, BeanContext.DEFAULT);
	s.getItems().validateOutput(20f, BeanContext.DEFAULT);
	s.getItems().getItems().validateOutput(200f, BeanContext.DEFAULT);
	s.getItems().getItems().getItems().validateOutput(2000f, BeanContext.DEFAULT);
	s.getItems().getItems().getItems().getItems().validateOutput(20000f, BeanContext.DEFAULT);
	// Factor+1 fails at each level.
	assertThrowsWithMessage(SchemaValidationException.class, "Multiple-of not met.", ()->s.getItems().validateOutput(11, BeanContext.DEFAULT));
	assertThrowsWithMessage(SchemaValidationException.class, "Multiple-of not met.", ()->s.getItems().getItems().validateOutput(101, BeanContext.DEFAULT));
	assertThrowsWithMessage(SchemaValidationException.class, "Multiple-of not met.", ()->s.getItems().getItems().getItems().validateOutput(1001, BeanContext.DEFAULT));
	assertThrowsWithMessage(SchemaValidationException.class, "Multiple-of not met.", ()->s.getItems().getItems().getItems().getItems().validateOutput(10001, BeanContext.DEFAULT));
}
@Content
@Schema(mo="10.1")
public static class C06a {}

/** multipleOf with a float factor: zero, exact multiples, and null pass; near-misses fail. */
@Test void c06a_multipleOf_floats() throws Exception {
	var schema = HttpPartSchema.create().applyAll(Content.class, C06a.class).build();
	for (var ok : new Object[]{0, 10.1f, 20.2f, null})
		schema.validateOutput(ok, BeanContext.DEFAULT);
	assertThrowsWithMessage(SchemaValidationException.class, "Multiple-of not met.", ()->schema.validateOutput(10.2f, BeanContext.DEFAULT));
}
// Fixture: float multipleOf at four nesting levels (factor scales by 10 per level).
@Content
@Schema(
	items=@Items(
		mo="10.1",
		items=@SubItems(
			mo="100.1",
			items={
				"multipleOf:1000.1,",
				"items:{multipleOf:10000.1}"
			}
		)
	)
)
public static class C06b {}

/** Verifies float multipleOf per nesting level: 0, 1x and 2x the factor pass; a near-miss fails. */
@Test void c06b_multipleOf_floats_items() throws Exception {
	var s = HttpPartSchema.create().applyAll(Content.class, C06b.class).build();
	// Zero is a multiple of anything, at every level.
	s.getItems().validateOutput(0, BeanContext.DEFAULT);
	s.getItems().getItems().validateOutput(0, BeanContext.DEFAULT);
	s.getItems().getItems().getItems().validateOutput(0, BeanContext.DEFAULT);
	s.getItems().getItems().getItems().getItems().validateOutput(0, BeanContext.DEFAULT);
	// Exactly one multiple at each level.
	s.getItems().validateOutput(10.1f, BeanContext.DEFAULT);
	s.getItems().getItems().validateOutput(100.1f, BeanContext.DEFAULT);
	s.getItems().getItems().getItems().validateOutput(1000.1f, BeanContext.DEFAULT);
	s.getItems().getItems().getItems().getItems().validateOutput(10000.1f, BeanContext.DEFAULT);
	// Two multiples at each level.
	s.getItems().validateOutput(20.2f, BeanContext.DEFAULT);
	s.getItems().getItems().validateOutput(200.2f, BeanContext.DEFAULT);
	s.getItems().getItems().getItems().validateOutput(2000.2f, BeanContext.DEFAULT);
	s.getItems().getItems().getItems().getItems().validateOutput(20000.2f, BeanContext.DEFAULT);
	// A value slightly off the multiple fails at each level.
	assertThrowsWithMessage(SchemaValidationException.class, "Multiple-of not met.", ()->s.getItems().validateOutput(10.2f, BeanContext.DEFAULT));
	assertThrowsWithMessage(SchemaValidationException.class, "Multiple-of not met.", ()->s.getItems().getItems().validateOutput(100.2f, BeanContext.DEFAULT));
	assertThrowsWithMessage(SchemaValidationException.class, "Multiple-of not met.", ()->s.getItems().getItems().getItems().validateOutput(1000.2f, BeanContext.DEFAULT));
	assertThrowsWithMessage(SchemaValidationException.class, "Multiple-of not met.", ()->s.getItems().getItems().getItems().getItems().validateOutput(10000.2f, BeanContext.DEFAULT));
}
//-----------------------------------------------------------------------------------------------------------------
// Collections/Array validations
//-----------------------------------------------------------------------------------------------------------------
// Fixture: uniqueItems=true at all four nesting levels.
@Content
@Schema(
	items=@Items(
		ui=true,
		items=@SubItems(
			ui=true,
			items={
				"uniqueItems:true,",
				"items:{uniqueItems:true}"
			}
		)
	)
)
public static class D01 {}

/** uniqueItems against String arrays: duplicates are rejected at every nesting level; null passes. */
@Test void d01a_uniqueItems_arrays() throws Exception {
	var s = HttpPartSchema.create().applyAll(Content.class, D01.class).build();
	var good = split("a,b");
	var bad = split("a,a");
	// Distinct elements pass at every level.
	s.getItems().validateOutput(good, BeanContext.DEFAULT);
	s.getItems().getItems().validateOutput(good, BeanContext.DEFAULT);
	s.getItems().getItems().getItems().validateOutput(good, BeanContext.DEFAULT);
	s.getItems().getItems().getItems().getItems().validateOutput(good, BeanContext.DEFAULT);
	s.getItems().validateOutput(null, BeanContext.DEFAULT);
	// Duplicates fail at every level.
	assertThrowsWithMessage(SchemaValidationException.class, "Duplicate items not allowed.", ()->s.getItems().validateOutput(bad, BeanContext.DEFAULT));
	assertThrowsWithMessage(SchemaValidationException.class, "Duplicate items not allowed.", ()->s.getItems().getItems().validateOutput(bad, BeanContext.DEFAULT));
	assertThrowsWithMessage(SchemaValidationException.class, "Duplicate items not allowed.", ()->s.getItems().getItems().getItems().validateOutput(bad, BeanContext.DEFAULT));
	assertThrowsWithMessage(SchemaValidationException.class, "Duplicate items not allowed.", ()->s.getItems().getItems().getItems().getItems().validateOutput(bad, BeanContext.DEFAULT));
}

/** Same as d01a but validating Collection inputs instead of arrays. */
@Test void d01b_uniqueItems_collections() throws Exception {
	var s = HttpPartSchema.create().applyAll(Content.class, D01.class).build();
	var good = alist("a","b");
	var bad = alist("a","a");
	// Distinct elements pass at every level.
	s.getItems().validateOutput(good, BeanContext.DEFAULT);
	s.getItems().getItems().validateOutput(good, BeanContext.DEFAULT);
	s.getItems().getItems().getItems().validateOutput(good, BeanContext.DEFAULT);
	s.getItems().getItems().getItems().getItems().validateOutput(good, BeanContext.DEFAULT);
	s.getItems().validateOutput(null, BeanContext.DEFAULT);
	// Duplicates fail at every level.
	assertThrowsWithMessage(SchemaValidationException.class, "Duplicate items not allowed.", ()->s.getItems().validateOutput(bad, BeanContext.DEFAULT));
	assertThrowsWithMessage(SchemaValidationException.class, "Duplicate items not allowed.", ()->s.getItems().getItems().validateOutput(bad, BeanContext.DEFAULT));
	assertThrowsWithMessage(SchemaValidationException.class, "Duplicate items not allowed.", ()->s.getItems().getItems().getItems().validateOutput(bad, BeanContext.DEFAULT));
	assertThrowsWithMessage(SchemaValidationException.class, "Duplicate items not allowed.", ()->s.getItems().getItems().getItems().getItems().validateOutput(bad, BeanContext.DEFAULT));
}
// Fixture: minItems/maxItems at four nesting levels; each deeper level allows one more item
// (level 1: 1-2 items, level 2: 2-3, level 3: 3-4, level 4: 4-5).
@Content
@Schema(
	items=@Items(
		mini=1, maxi=2,
		items=@SubItems(
			mini=2, maxi=3,
			items={
				"minItems:3,maxItems:4,",
				"items:{minItems:4,maxItems:5}"
			}
		)
	)
)
public static class D02 {}

/** Verifies minItems/maxItems on arrays: counts at both ends of each level's window pass; one outside fails. */
@Test void d02a_minMaxItems_arrays() throws Exception {
	var s = HttpPartSchema.create().applyAll(Content.class, D02.class).build();
	// Minimum item count at each level.
	s.getItems().validateOutput(split("1"), BeanContext.DEFAULT);
	s.getItems().getItems().validateOutput(split("1,2"), BeanContext.DEFAULT);
	s.getItems().getItems().getItems().validateOutput(split("1,2,3"), BeanContext.DEFAULT);
	s.getItems().getItems().getItems().getItems().validateOutput(split("1,2,3,4"), BeanContext.DEFAULT);
	// Maximum item count at each level.
	s.getItems().validateOutput(split("1,2"), BeanContext.DEFAULT);
	s.getItems().getItems().validateOutput(split("1,2,3"), BeanContext.DEFAULT);
	s.getItems().getItems().getItems().validateOutput(split("1,2,3,4"), BeanContext.DEFAULT);
	s.getItems().getItems().getItems().getItems().validateOutput(split("1,2,3,4,5"), BeanContext.DEFAULT);
	// One item short fails at each level.
	assertThrowsWithMessage(SchemaValidationException.class, "Minimum number of items not met.", ()->s.getItems().validateOutput(new String[0], BeanContext.DEFAULT));
	assertThrowsWithMessage(SchemaValidationException.class, "Minimum number of items not met.", ()->s.getItems().getItems().validateOutput(split("1"), BeanContext.DEFAULT));
	assertThrowsWithMessage(SchemaValidationException.class, "Minimum number of items not met.", ()->s.getItems().getItems().getItems().validateOutput(split("1,2"), BeanContext.DEFAULT));
	assertThrowsWithMessage(SchemaValidationException.class, "Minimum number of items not met.", ()->s.getItems().getItems().getItems().getItems().validateOutput(split("1,2,3"), BeanContext.DEFAULT));
	// One item over fails at each level.
	assertThrowsWithMessage(SchemaValidationException.class, "Maximum number of items exceeded.", ()->s.getItems().validateOutput(split("1,2,3"), BeanContext.DEFAULT));
	assertThrowsWithMessage(SchemaValidationException.class, "Maximum number of items exceeded.", ()->s.getItems().getItems().validateOutput(split("1,2,3,4"), BeanContext.DEFAULT));
	assertThrowsWithMessage(SchemaValidationException.class, "Maximum number of items exceeded.", ()->s.getItems().getItems().getItems().validateOutput(split("1,2,3,4,5"), BeanContext.DEFAULT));
	assertThrowsWithMessage(SchemaValidationException.class, "Maximum number of items exceeded.", ()->s.getItems().getItems().getItems().getItems().validateOutput(split("1,2,3,4,5,6"), BeanContext.DEFAULT));
}
} |
apache/druid | 37,977 | sql/src/main/java/org/apache/druid/sql/calcite/expression/Expressions.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.sql.calcite.expression;
import com.google.common.base.Preconditions;
import com.google.common.collect.Iterables;
import org.apache.calcite.jdbc.JavaTypeFactoryImpl;
import org.apache.calcite.rel.core.Project;
import org.apache.calcite.rel.type.RelDataTypeFactory;
import org.apache.calcite.rex.RexCall;
import org.apache.calcite.rex.RexInputRef;
import org.apache.calcite.rex.RexLiteral;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.rex.RexOver;
import org.apache.calcite.rex.RexUtil;
import org.apache.calcite.sql.SqlKind;
import org.apache.calcite.sql.SqlOperator;
import org.apache.calcite.sql.type.SqlTypeFamily;
import org.apache.calcite.sql.type.SqlTypeName;
import org.apache.druid.java.util.common.DateTimes;
import org.apache.druid.java.util.common.ISE;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.java.util.common.granularity.Granularity;
import org.apache.druid.math.expr.Expr;
import org.apache.druid.math.expr.ExpressionType;
import org.apache.druid.query.aggregation.PostAggregator;
import org.apache.druid.query.expression.TimestampFloorExprMacro;
import org.apache.druid.query.extraction.ExtractionFn;
import org.apache.druid.query.extraction.TimeFormatExtractionFn;
import org.apache.druid.query.filter.AndDimFilter;
import org.apache.druid.query.filter.DimFilter;
import org.apache.druid.query.filter.ExpressionDimFilter;
import org.apache.druid.query.filter.IsFalseDimFilter;
import org.apache.druid.query.filter.IsTrueDimFilter;
import org.apache.druid.query.filter.NotDimFilter;
import org.apache.druid.query.filter.NullFilter;
import org.apache.druid.query.filter.OrDimFilter;
import org.apache.druid.query.filter.SelectorDimFilter;
import org.apache.druid.query.ordering.StringComparator;
import org.apache.druid.query.ordering.StringComparators;
import org.apache.druid.segment.column.ColumnHolder;
import org.apache.druid.segment.column.ColumnType;
import org.apache.druid.segment.column.RowSignature;
import org.apache.druid.segment.column.Types;
import org.apache.druid.sql.calcite.filtration.BoundRefKey;
import org.apache.druid.sql.calcite.filtration.Bounds;
import org.apache.druid.sql.calcite.filtration.Filtration;
import org.apache.druid.sql.calcite.filtration.RangeRefKey;
import org.apache.druid.sql.calcite.filtration.Ranges;
import org.apache.druid.sql.calcite.planner.Calcites;
import org.apache.druid.sql.calcite.planner.ExpressionParser;
import org.apache.druid.sql.calcite.planner.PlannerContext;
import org.apache.druid.sql.calcite.rel.CannotBuildQueryException;
import org.apache.druid.sql.calcite.rel.VirtualColumnRegistry;
import org.apache.druid.sql.calcite.table.RowSignatures;
import org.joda.time.Interval;
import javax.annotation.Nullable;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
/**
* A collection of functions for translating from Calcite expressions into Druid objects.
*/
public class Expressions
{
/** Static-utility class; the private constructor prevents instantiation. */
private Expressions()
{
  // No instantiation.
}
/**
 * Old form of {@link #fromFieldAccess(RelDataTypeFactory, RowSignature, Project, int)} that builds its own
 * type factory when one is required. Retained only for API compatibility with extensions.
 *
 * @param rowSignature row signature of underlying Druid dataSource
 * @param project      projection, or null
 * @param fieldNumber  number of the field to access
 *
 * @return row expression
 *
 * @deprecated call the four-argument overload instead, obtaining the type factory from an available RexBuilder
 */
@Deprecated
public static RexNode fromFieldAccess(
    final RowSignature rowSignature,
    @Nullable final Project project,
    final int fieldNumber
)
{
  // The four-argument overload only reads the type factory when there is no projection,
  // so only construct one in that case.
  //noinspection VariableNotUsedInsideIf
  final RelDataTypeFactory typeFactory = project == null ? new JavaTypeFactoryImpl() : null;
  return fromFieldAccess(typeFactory, rowSignature, project, fieldNumber);
}
/**
 * Translates a field access, possibly through a projection, to an underlying Druid dataSource.
 *
 * @param typeFactory  factory for creating SQL types
 * @param rowSignature row signature of underlying Druid dataSource
 * @param project      projection, or null
 * @param fieldNumber  number of the field to access
 *
 * @return row expression
 */
public static RexNode fromFieldAccess(
    final RelDataTypeFactory typeFactory,
    final RowSignature rowSignature,
    @Nullable final Project project,
    final int fieldNumber
)
{
  if (project != null) {
    // The field refers to one of the projection's output expressions.
    return project.getProjects().get(fieldNumber);
  }
  // No projection: reference the dataSource column directly.
  return RexInputRef.of(fieldNumber, RowSignatures.toRelDataType(rowSignature, typeFactory));
}
/**
 * Translates a list of Calcite {@code RexNode} to Druid expressions.
 *
 * @param plannerContext SQL planner context
 * @param rowSignature   signature of the rows to be extracted from
 * @param rexNodes       list of Calcite expressions meant to be applied on top of the rows
 *
 * @return list of Druid expressions in the same order as rexNodes, or null if any node cannot be translated.
 * A non-null return contains no null elements.
 */
@Nullable
public static List<DruidExpression> toDruidExpressions(
    final PlannerContext plannerContext,
    final RowSignature rowSignature,
    final List<RexNode> rexNodes
)
{
  final List<DruidExpression> expressions = new ArrayList<>(rexNodes.size());
  for (final RexNode node : rexNodes) {
    final DruidExpression expression = toDruidExpression(plannerContext, rowSignature, node);
    if (expression == null) {
      // All-or-nothing: a single untranslatable node fails the whole list.
      return null;
    }
    expressions.add(expression);
  }
  return expressions;
}
/**
 * Translates a list of Calcite {@code RexNode} to Druid expressions, allowing post-aggregator operands.
 *
 * @param plannerContext        SQL planner context
 * @param rowSignature          signature of the rows to be extracted from
 * @param rexNodes              list of Calcite expressions meant to be applied on top of the rows
 * @param postAggregatorVisitor visitor that manages postagg names and tracks postaggs created during translation
 *
 * @return list of Druid expressions in the same order as rexNodes, or null if any node cannot be translated.
 * A non-null return contains no null elements.
 */
@Nullable
public static List<DruidExpression> toDruidExpressionsWithPostAggOperands(
    final PlannerContext plannerContext,
    final RowSignature rowSignature,
    final List<RexNode> rexNodes,
    final PostAggregatorVisitor postAggregatorVisitor
)
{
  final List<DruidExpression> expressions = new ArrayList<>(rexNodes.size());
  for (final RexNode node : rexNodes) {
    final DruidExpression expression = toDruidExpressionWithPostAggOperands(
        plannerContext,
        rowSignature,
        node,
        postAggregatorVisitor
    );
    if (expression == null) {
      // All-or-nothing: a single untranslatable node fails the whole list.
      return null;
    }
    expressions.add(expression);
  }
  return expressions;
}
/**
* Translate a Calcite {@link RexNode} to a Druid expression for projections or the aggregators that don't
* require numeric inputs.
*
* Consider using {@link org.apache.druid.sql.calcite.aggregation.Aggregations#toDruidExpressionForNumericAggregator}
* for the aggregators that require numeric inputs.
*
* @param plannerContext SQL planner context
* @param rowSignature signature of the rows to be extracted from
* @param rexNode expression meant to be applied on top of the rows
*
* @return DruidExpression referring to fields in rowOrder, or null if not possible to translate
*/
@Nullable
public static DruidExpression toDruidExpression(
final PlannerContext plannerContext,
final RowSignature rowSignature,
final RexNode rexNode
)
{
return toDruidExpressionWithPostAggOperands(
plannerContext,
rowSignature,
rexNode,
null
);
}
@Nullable
public static DruidExpression toDruidExpressionWithPostAggOperands(
final PlannerContext plannerContext,
final RowSignature rowSignature,
final RexNode rexNode,
@Nullable final PostAggregatorVisitor postAggregatorVisitor
)
{
final SqlKind kind = rexNode.getKind();
if (kind == SqlKind.INPUT_REF) {
return inputRefToDruidExpression(rowSignature, rexNode);
} else if (rexNode instanceof RexOver) {
throw new CannotBuildQueryException(
StringUtils.format("Unexpected OVER expression during translation [%s]", rexNode)
);
} else if (rexNode instanceof RexCall) {
return rexCallToDruidExpression(plannerContext, rowSignature, rexNode, postAggregatorVisitor);
} else if (kind == SqlKind.LITERAL) {
final DruidLiteral eval = calciteLiteralToDruidLiteral(plannerContext, rexNode);
return eval != null ? DruidExpression.ofLiteral(eval) : null;
} else {
// Can't translate.
return null;
}
}
private static DruidExpression inputRefToDruidExpression(
final RowSignature rowSignature,
final RexNode rexNode
)
{
// Translate field references.
final RexInputRef ref = (RexInputRef) rexNode;
final String columnName = rowSignature.getColumnName(ref.getIndex());
final Optional<ColumnType> columnType = rowSignature.getColumnType(ref.getIndex());
if (columnName == null) {
throw new ISE("Expression referred to nonexistent index[%d]", ref.getIndex());
}
return DruidExpression.ofColumn(columnType.orElse(null), columnName);
}
private static DruidExpression rexCallToDruidExpression(
final PlannerContext plannerContext,
final RowSignature rowSignature,
final RexNode rexNode,
final PostAggregatorVisitor postAggregatorVisitor
)
{
final SqlOperator operator = ((RexCall) rexNode).getOperator();
final SqlOperatorConversion conversion = plannerContext.getPlannerToolbox().operatorTable()
.lookupOperatorConversion(operator);
if (conversion == null) {
plannerContext.setPlanningError("SQL query requires '%s' operator that is not supported.", operator.getName());
return null;
} else {
if (postAggregatorVisitor != null) {
// try making postagg first
PostAggregator postAggregator = conversion.toPostAggregator(
plannerContext,
rowSignature,
rexNode,
postAggregatorVisitor
);
if (postAggregator != null) {
postAggregatorVisitor.addPostAgg(postAggregator);
String exprName = postAggregator.getName();
return DruidExpression.ofColumn(postAggregator.getType(rowSignature), exprName);
}
}
DruidExpression expression = conversion.toDruidExpressionWithPostAggOperands(
plannerContext,
rowSignature,
rexNode,
postAggregatorVisitor
);
return expression;
}
}
  /**
   * Create a {@link DruidLiteral} from a literal {@link RexNode}. Necessary because Calcite represents literals using
   * different Java classes than Druid does.
   *
   * @param plannerContext planner context
   * @param rexNode Calcite literal
   *
   * @return converted literal, or null if the literal cannot be converted
   */
  @Nullable
  public static DruidLiteral calciteLiteralToDruidLiteral(
      final PlannerContext plannerContext,
      final RexNode rexNode
  )
  {
    // CAST of a literal: convert the inner literal first, then cast the result to the target Druid type.
    if (rexNode.isA(SqlKind.CAST)) {
      if (SqlTypeFamily.DATE.contains(rexNode.getType())) {
        // Cast to DATE suggests some timestamp flooring. We don't deal with that here, so return null.
        return null;
      }
      final DruidLiteral innerLiteral =
          calciteLiteralToDruidLiteral(plannerContext, ((RexCall) rexNode).getOperands().get(0));
      if (innerLiteral == null) {
        // Inner operand is not convertible, so neither is the cast.
        return null;
      }
      final ColumnType castToColumnType = Calcites.getColumnTypeForRelDataType(rexNode.getType());
      if (castToColumnType == null) {
        // Target type has no Druid column-type equivalent.
        return null;
      }
      final ExpressionType castToExprType = ExpressionType.fromColumnType(castToColumnType);
      if (castToExprType == null) {
        return null;
      }
      return innerLiteral.castTo(castToExprType);
    }
    // Translate literal.
    final SqlTypeName sqlTypeName = rexNode.getType().getSqlTypeName();
    final DruidLiteral retVal;
    if (RexLiteral.isNullLiteral(rexNode)) {
      // Typed NULL: carry the declared type along when it maps to a Druid type.
      final ColumnType columnType = Calcites.getColumnTypeForRelDataType(rexNode.getType());
      final ExpressionType expressionType = columnType == null ? null : ExpressionType.fromColumnTypeStrict(columnType);
      retVal = new DruidLiteral(expressionType, null);
    } else if (SqlTypeName.INT_TYPES.contains(sqlTypeName)) {
      // Integer family maps to LONG. Checked before NUMERIC_TYPES, which also includes the integer types.
      final Number number = (Number) RexLiteral.value(rexNode);
      retVal = new DruidLiteral(ExpressionType.LONG, number == null ? null : number.longValue());
    } else if (SqlTypeName.NUMERIC_TYPES.contains(sqlTypeName)) {
      // Numeric, non-INT, means we represent it as a double.
      final Number number = (Number) RexLiteral.value(rexNode);
      retVal = new DruidLiteral(ExpressionType.DOUBLE, number == null ? null : number.doubleValue());
    } else if (SqlTypeFamily.INTERVAL_DAY_TIME == sqlTypeName.getFamily()) {
      // Calcite represents DAY-TIME intervals in milliseconds.
      final long milliseconds = ((Number) RexLiteral.value(rexNode)).longValue();
      retVal = new DruidLiteral(ExpressionType.LONG, milliseconds);
    } else if (SqlTypeFamily.INTERVAL_YEAR_MONTH == sqlTypeName.getFamily()) {
      // Calcite represents YEAR-MONTH intervals in months.
      final long months = ((Number) RexLiteral.value(rexNode)).longValue();
      retVal = new DruidLiteral(ExpressionType.LONG, months);
    } else if (SqlTypeName.STRING_TYPES.contains(sqlTypeName)) {
      final String s = RexLiteral.stringValue(rexNode);
      retVal = new DruidLiteral(ExpressionType.STRING, s);
    } else if (SqlTypeName.TIMESTAMP == sqlTypeName || SqlTypeName.DATE == sqlTypeName) {
      // Timestamps/dates become epoch milliseconds, interpreted in the session time zone.
      retVal = new DruidLiteral(
          ExpressionType.LONG,
          Calcites.calciteDateTimeLiteralToJoda(rexNode, plannerContext.getTimeZone()).getMillis()
      );
    } else if (SqlTypeName.BOOLEAN == sqlTypeName) {
      // Druid represents booleans as LONG 1/0.
      retVal = new DruidLiteral(ExpressionType.LONG, RexLiteral.booleanValue(rexNode) ? 1L : 0L);
    } else {
      // Can't translate other literals.
      return null;
    }
    return retVal;
  }
/**
* Translates "condition" to a Druid filter, or returns null if we cannot translate the condition.
*
* @param plannerContext planner context
* @param rowSignature input row signature
* @param virtualColumnRegistry re-usable virtual column references, may be null if virtual columns aren't allowed
* @param expression Calcite row expression
*/
@Nullable
public static DimFilter toFilter(
final PlannerContext plannerContext,
final RowSignature rowSignature,
@Nullable final VirtualColumnRegistry virtualColumnRegistry,
final RexNode expression
)
{
final SqlKind kind = expression.getKind();
if (kind == SqlKind.IS_TRUE
|| kind == SqlKind.IS_NOT_TRUE
|| kind == SqlKind.IS_FALSE
|| kind == SqlKind.IS_NOT_FALSE) {
final DimFilter baseFilter = toFilter(
plannerContext,
rowSignature,
virtualColumnRegistry,
Iterables.getOnlyElement(((RexCall) expression).getOperands())
);
if (kind == SqlKind.IS_TRUE) {
return IsTrueDimFilter.of(baseFilter);
} else if (kind == SqlKind.IS_NOT_TRUE) {
return NotDimFilter.of(IsTrueDimFilter.of(baseFilter));
} else if (kind == SqlKind.IS_FALSE) {
return IsFalseDimFilter.of(baseFilter);
} else { // SqlKind.IS_NOT_FALSE
return NotDimFilter.of(IsFalseDimFilter.of(baseFilter));
}
} else if (kind == SqlKind.CAST && expression.getType().getSqlTypeName() == SqlTypeName.BOOLEAN) {
// Calcite sometimes leaves errant, useless cast-to-booleans inside filters. Strip them and continue.
return toFilter(
plannerContext,
rowSignature,
virtualColumnRegistry,
Iterables.getOnlyElement(((RexCall) expression).getOperands())
);
} else if (kind == SqlKind.AND || kind == SqlKind.OR || kind == SqlKind.NOT) {
final List<DimFilter> filters = new ArrayList<>();
for (final RexNode rexNode : ((RexCall) expression).getOperands()) {
final DimFilter nextFilter = toFilter(
plannerContext,
rowSignature,
virtualColumnRegistry,
rexNode
);
if (nextFilter == null) {
return null;
}
filters.add(nextFilter);
}
if (kind == SqlKind.AND) {
return new AndDimFilter(filters);
} else if (kind == SqlKind.OR) {
return new OrDimFilter(filters);
} else { // SqlKind.NOT
return new NotDimFilter(Iterables.getOnlyElement(filters));
}
} else {
// Handle filter conditions on everything else.
return toLeafFilter(plannerContext, rowSignature, virtualColumnRegistry, expression);
}
}
/**
* Translates "condition" to a Druid filter, assuming it does not contain any boolean expressions. Returns null
* if we cannot translate the condition.
*
* @param plannerContext planner context
* @param rowSignature input row signature
* @param virtualColumnRegistry re-usable virtual column references, may be null if virtual columns aren't allowed
* @param rexNode Calcite row expression
*/
@Nullable
private static DimFilter toLeafFilter(
final PlannerContext plannerContext,
final RowSignature rowSignature,
@Nullable final VirtualColumnRegistry virtualColumnRegistry,
final RexNode rexNode
)
{
if (rexNode.isAlwaysTrue()) {
return Filtration.matchEverything();
} else if (rexNode.isAlwaysFalse()) {
return Filtration.matchNothing();
}
final DimFilter simpleFilter = toSimpleLeafFilter(
plannerContext,
rowSignature,
virtualColumnRegistry,
rexNode
);
return simpleFilter != null
? simpleFilter
: toExpressionLeafFilter(plannerContext, rowSignature, rexNode);
}
  /**
   * Translates to a simple leaf filter, i.e. not an "expression" type filter. Note that the filter may still
   * reference expression virtual columns, if and only if "virtualColumnRegistry" is defined.
   *
   * @param plannerContext planner context
   * @param rowSignature input row signature
   * @param virtualColumnRegistry re-usable virtual column references, may be null if virtual columns aren't allowed
   * @param rexNode Calcite row expression
   */
  @Nullable
  private static DimFilter toSimpleLeafFilter(
      final PlannerContext plannerContext,
      final RowSignature rowSignature,
      @Nullable final VirtualColumnRegistry virtualColumnRegistry,
      final RexNode rexNode
  )
  {
    final SqlKind kind = rexNode.getKind();
    if (kind == SqlKind.IS_TRUE || kind == SqlKind.IS_NOT_FALSE || kind == SqlKind.IS_FALSE || kind == SqlKind.IS_NOT_TRUE) {
      // use expression filter to get istrue/notfalse/isfalse/nottrue expressions for correct 3vl behavior
      return toExpressionLeafFilter(plannerContext, rowSignature, rexNode);
    } else if (kind == SqlKind.IS_NULL || kind == SqlKind.IS_NOT_NULL) {
      // IS [NOT] NULL: build a null-check filter on the (possibly virtual) column for the operand.
      final RexNode operand = Iterables.getOnlyElement(((RexCall) rexNode).getOperands());
      final DruidExpression druidExpression = toDruidExpression(plannerContext, rowSignature, operand);
      if (druidExpression == null) {
        return null;
      }
      final DimFilter equalFilter;
      final ColumnType outputType = druidExpression.getDruidType();
      final boolean isOutputNumeric = Types.isNumeric(outputType);
      // if a simple extraction, we can typically use the base column directly for filtering. however, some expressions
      // such as cast also appear as a simple extraction because some native layer things can handle the cast
      // themselves, so we check the output type of the expression and compare it to the type of the direct column. a
      // string column might produce additional null values when converting to a number, so we should use the virtual
      // column instead for filtering to ensure that results are correct
      if (druidExpression.isSimpleExtraction() &&
          !(isOutputNumeric && !rowSignature.isNumeric(druidExpression.getDirectColumn()))) {
        if (plannerContext.isUseBoundsAndSelectors()) {
          // Legacy null check: selector filter matching the null value.
          equalFilter = new SelectorDimFilter(
              druidExpression.getSimpleExtraction().getColumn(),
              null,
              druidExpression.getSimpleExtraction().getExtractionFn()
          );
        } else {
          if (druidExpression.getSimpleExtraction().getExtractionFn() != null) {
            // NullFilter does not support extraction functions; route through a virtual column if possible.
            if (virtualColumnRegistry != null) {
              String column = virtualColumnRegistry.getOrCreateVirtualColumnForExpression(
                  druidExpression,
                  druidExpression.getDruidType()
              );
              equalFilter = NullFilter.forColumn(column);
            } else {
              // virtual column registry unavailable, fallback to expression filter
              return null;
            }
          } else {
            equalFilter = NullFilter.forColumn(druidExpression.getDirectColumn());
          }
        }
      } else if (virtualColumnRegistry != null) {
        // Not a simple extraction: materialize the expression as a virtual column and null-check that.
        final String virtualColumn = virtualColumnRegistry.getOrCreateVirtualColumnForExpression(
            druidExpression,
            operand.getType()
        );
        if (plannerContext.isUseBoundsAndSelectors()) {
          equalFilter = new SelectorDimFilter(virtualColumn, null, null);
        } else {
          equalFilter = NullFilter.forColumn(virtualColumn);
        }
      } else {
        return null;
      }
      return kind == SqlKind.IS_NOT_NULL ? new NotDimFilter(equalFilter) : equalFilter;
    } else if (kind == SqlKind.EQUALS
               || kind == SqlKind.NOT_EQUALS
               || kind == SqlKind.IS_NOT_DISTINCT_FROM
               || kind == SqlKind.IS_DISTINCT_FROM
               || kind == SqlKind.GREATER_THAN
               || kind == SqlKind.GREATER_THAN_OR_EQUAL
               || kind == SqlKind.LESS_THAN
               || kind == SqlKind.LESS_THAN_OR_EQUAL) {
      final List<RexNode> operands = ((RexCall) rexNode).getOperands();
      Preconditions.checkState(operands.size() == 2, "Expected 2 operands, got[%s]", operands.size());
      // Normalize "literal op expr" into "expr op literal", flipping the comparison operator below.
      boolean flip = false;
      RexNode lhs = operands.get(0);
      RexNode rhs = operands.get(1);
      if (lhs.getKind() == SqlKind.LITERAL && rhs.getKind() != SqlKind.LITERAL) {
        // swap lhs, rhs
        RexNode x = lhs;
        lhs = rhs;
        rhs = x;
        flip = true;
      }
      // Flip operator, maybe.
      final SqlKind flippedKind;
      if (flip) {
        switch (kind) {
          case EQUALS:
          case NOT_EQUALS:
          case IS_NOT_DISTINCT_FROM:
          case IS_DISTINCT_FROM:
            // Symmetric operators are unchanged by the swap.
            flippedKind = kind;
            break;
          case GREATER_THAN:
            flippedKind = SqlKind.LESS_THAN;
            break;
          case GREATER_THAN_OR_EQUAL:
            flippedKind = SqlKind.LESS_THAN_OR_EQUAL;
            break;
          case LESS_THAN:
            flippedKind = SqlKind.GREATER_THAN;
            break;
          case LESS_THAN_OR_EQUAL:
            flippedKind = SqlKind.GREATER_THAN_OR_EQUAL;
            break;
          default:
            throw new ISE("Kind[%s] not expected here", kind);
        }
      } else {
        flippedKind = kind;
      }
      // Translate rhs and require it to parse to a literal Druid expression.
      final DruidExpression rhsExpression = toDruidExpression(plannerContext, rowSignature, rhs);
      final Expr rhsParsed = rhsExpression != null
                             ? plannerContext.parseExpression(rhsExpression.getExpression())
                             : null;
      // rhs must be a literal
      if (rhsParsed == null || !rhsParsed.isLiteral()) {
        return null;
      }
      // Translate lhs to a DruidExpression.
      final DruidExpression lhsExpression = toDruidExpression(plannerContext, rowSignature, lhs);
      if (lhsExpression == null) {
        return null;
      }
      // Special handling for filters like FLOOR(__time TO granularity).
      final Granularity queryGranularity =
          toQueryGranularity(lhsExpression, plannerContext.getExpressionParser());
      if (queryGranularity != null && !RexLiteral.isNullLiteral(rhs)) {
        // lhs is a time-floor expression; rhs must be a timestamp or millis
        final long rhsMillis;
        if (rhs.getType().getSqlTypeName() == SqlTypeName.BIGINT) {
          rhsMillis = ((Number) RexLiteral.value(rhs)).longValue();
        } else {
          rhsMillis = Calcites.calciteDateTimeLiteralToJoda(rhs, plannerContext.getTimeZone()).getMillis();
        }
        return buildTimeFloorFilter(
            ColumnHolder.TIME_COLUMN_NAME,
            queryGranularity,
            flippedKind,
            rhsMillis,
            plannerContext
        );
      }
      // Resolve lhs to a concrete (possibly virtual) column plus optional extraction function.
      String column;
      final ExtractionFn extractionFn;
      if (lhsExpression.isSimpleExtraction()) {
        column = lhsExpression.getSimpleExtraction().getColumn();
        extractionFn = lhsExpression.getSimpleExtraction().getExtractionFn();
      } else if (virtualColumnRegistry != null) {
        column = virtualColumnRegistry.getOrCreateVirtualColumnForExpression(
            lhsExpression,
            lhs.getType()
        );
        extractionFn = null;
      } else {
        return null;
      }
      if (column.equals(ColumnHolder.TIME_COLUMN_NAME) && extractionFn instanceof TimeFormatExtractionFn) {
        // Check if we can strip the extractionFn and convert the filter to a direct filter on __time.
        // This allows potential conversion to query-level "intervals" later on, which is ideal for Druid queries.
        final Granularity granularity = ExtractionFns.toQueryGranularity(extractionFn);
        if (granularity != null) {
          // lhs is FLOOR(__time TO granularity); rhs must be a timestamp.
          final long rhsMillis = Calcites.calciteDateTimeLiteralToJoda(rhs, plannerContext.getTimeZone()).getMillis();
          return buildTimeFloorFilter(column, granularity, flippedKind, rhsMillis, plannerContext);
        }
      }
      final ColumnType matchValueType = Calcites.getColumnTypeForRelDataType(rhs.getType());
      if (plannerContext.isUseBoundsAndSelectors()) {
        // Legacy path: bound/selector filters keyed on string representations of values.
        if (matchValueType == null || !matchValueType.isPrimitive()) {
          // Fall back to expression filter.
          return null;
        }
        final String stringVal;
        if (rhsParsed.getLiteralValue() == null) {
          stringVal = null;
        } else if (RexUtil.isLiteral(rhs, true) && SqlTypeName.NUMERIC_TYPES.contains(rhs.getType().getSqlTypeName())) {
          // Peek inside the original rhs for numerics, rather than using the parsed version, for highest fidelity
          // to what the query originally contained. (It may be a BigDecimal.)
          stringVal = String.valueOf(RexLiteral.value(rhs));
        } else {
          stringVal = String.valueOf(rhsParsed.getLiteralValue());
        }
        if (stringVal == null) {
          // Fall back to expression filter.
          return null;
        }
        // Numeric lhs needs a numeric comparison.
        final StringComparator comparator = Calcites.getStringComparatorForRelDataType(lhs.getType());
        if (comparator == null) {
          // Type is not comparable.
          return null;
        }
        final BoundRefKey boundRefKey = new BoundRefKey(column, extractionFn, comparator);
        final DimFilter filter;
        // Always use BoundDimFilters, to simplify filter optimization later (it helps to remember the comparator).
        switch (flippedKind) {
          case EQUALS:
          case IS_NOT_DISTINCT_FROM:
            // OK to treat EQUALS, IS_NOT_DISTINCT_FROM the same since we know stringVal is nonnull.
            filter = Bounds.equalTo(boundRefKey, stringVal);
            break;
          case NOT_EQUALS:
          case IS_DISTINCT_FROM:
            // OK to treat NOT_EQUALS, IS_DISTINCT_FROM the same since we know stringVal is nonnull.
            filter = new NotDimFilter(Bounds.equalTo(boundRefKey, stringVal));
            break;
          case GREATER_THAN:
            filter = Bounds.greaterThan(boundRefKey, stringVal);
            break;
          case GREATER_THAN_OR_EQUAL:
            filter = Bounds.greaterThanOrEqualTo(boundRefKey, stringVal);
            break;
          case LESS_THAN:
            filter = Bounds.lessThan(boundRefKey, stringVal);
            break;
          case LESS_THAN_OR_EQUAL:
            filter = Bounds.lessThanOrEqualTo(boundRefKey, stringVal);
            break;
          default:
            throw new IllegalStateException("Shouldn't have got here");
        }
        return filter;
      } else {
        // Modern path: typed equality/range filters on the literal value itself.
        final Object val = rhsParsed.getLiteralValue();
        if (val == null) {
          // fall back to expression filter
          return null;
        }
        // extractionFn are not supported by equality/range filter
        if (extractionFn != null) {
          if (virtualColumnRegistry != null) {
            column = virtualColumnRegistry.getOrCreateVirtualColumnForExpression(
                lhsExpression,
                lhs.getType()
            );
          } else {
            // if this happens for some reason, bail and use an expression filter
            return null;
          }
        }
        final RangeRefKey rangeRefKey = new RangeRefKey(column, matchValueType);
        final DimFilter filter;
        // Always use RangeFilter, to simplify filter optimization later
        switch (flippedKind) {
          case EQUALS:
          case IS_NOT_DISTINCT_FROM:
            filter = Ranges.equalTo(rangeRefKey, val);
            break;
          case NOT_EQUALS:
          case IS_DISTINCT_FROM:
            filter = new NotDimFilter(Ranges.equalTo(rangeRefKey, val));
            break;
          case GREATER_THAN:
            filter = Ranges.greaterThan(rangeRefKey, val);
            break;
          case GREATER_THAN_OR_EQUAL:
            filter = Ranges.greaterThanOrEqualTo(rangeRefKey, val);
            break;
          case LESS_THAN:
            filter = Ranges.lessThan(rangeRefKey, val);
            break;
          case LESS_THAN_OR_EQUAL:
            filter = Ranges.lessThanOrEqualTo(rangeRefKey, val);
            break;
          default:
            throw new IllegalStateException("Shouldn't have got here");
        }
        return filter;
      }
    } else if (rexNode instanceof RexCall) {
      // Other calls: delegate to the operator's registered filter conversion, if any.
      final SqlOperator operator = ((RexCall) rexNode).getOperator();
      final SqlOperatorConversion conversion = plannerContext.getPlannerToolbox()
                                                             .operatorTable()
                                                             .lookupOperatorConversion(operator);
      if (conversion == null) {
        return null;
      } else {
        return conversion.toDruidFilter(plannerContext, rowSignature, virtualColumnRegistry, rexNode);
      }
    } else {
      return null;
    }
  }
/**
* Translates to an "expression" type leaf filter. Used as a fallback if we can't use a simple leaf filter.
*/
@Nullable
private static DimFilter toExpressionLeafFilter(
final PlannerContext plannerContext,
final RowSignature rowSignature,
final RexNode rexNode
)
{
final DruidExpression druidExpression = toDruidExpression(plannerContext, rowSignature, rexNode);
if (druidExpression != null) {
return new ExpressionDimFilter(
druidExpression.getExpression(),
plannerContext.parseExpression(druidExpression.getExpression()),
null
);
}
return null;
}
/**
* Converts an expression to a Granularity, if possible. This is possible if, and only if, the expression
* is a timestamp_floor function on the __time column with literal parameters for period, origin, and timeZone.
*
* @return granularity or null if not possible
*/
@Nullable
public static Granularity toQueryGranularity(final DruidExpression expression, final ExpressionParser parser)
{
final TimestampFloorExprMacro.TimestampFloorExpr expr = asTimestampFloorExpr(expression, parser);
if (expr == null) {
return null;
}
final Expr arg = expr.getArg();
final Granularity granularity = expr.getGranularity();
if (ColumnHolder.TIME_COLUMN_NAME.equals(arg.getBindingIfIdentifier())) {
return granularity;
} else {
return null;
}
}
@Nullable
public static TimestampFloorExprMacro.TimestampFloorExpr asTimestampFloorExpr(
final DruidExpression expression,
final ExpressionParser parser
)
{
final Expr expr = parser.parse(expression.getExpression());
if (expr instanceof TimestampFloorExprMacro.TimestampFloorExpr) {
return (TimestampFloorExprMacro.TimestampFloorExpr) expr;
} else {
return null;
}
}
/**
* Build a filter for an expression like FLOOR(column TO granularity) [operator] rhsMillis
*/
private static DimFilter buildTimeFloorFilter(
final String column,
final Granularity granularity,
final SqlKind operatorKind,
final long rhsMillis,
final PlannerContext plannerContext
)
{
final Interval rhsInterval = granularity.bucket(DateTimes.utc(rhsMillis));
// Is rhs aligned on granularity boundaries?
final boolean rhsAligned = rhsInterval.getStartMillis() == rhsMillis;
if (plannerContext.isUseBoundsAndSelectors()) {
final BoundRefKey boundRefKey = new BoundRefKey(column, null, StringComparators.NUMERIC);
return getBoundTimeDimFilter(operatorKind, boundRefKey, rhsInterval, rhsAligned);
} else {
final RangeRefKey rangeRefKey = new RangeRefKey(column, ColumnType.LONG);
return getRangeTimeDimFilter(operatorKind, rangeRefKey, rhsInterval, rhsAligned);
}
}
private static DimFilter getBoundTimeDimFilter(
SqlKind operatorKind,
BoundRefKey boundRefKey,
Interval interval,
boolean isAligned
)
{
switch (operatorKind) {
case EQUALS:
return isAligned
? Bounds.interval(boundRefKey, interval)
: Filtration.matchNothing();
case NOT_EQUALS:
return isAligned
? new NotDimFilter(Bounds.interval(boundRefKey, interval))
: Filtration.matchEverything();
case GREATER_THAN:
return Bounds.greaterThanOrEqualTo(boundRefKey, String.valueOf(interval.getEndMillis()));
case GREATER_THAN_OR_EQUAL:
return isAligned
? Bounds.greaterThanOrEqualTo(boundRefKey, String.valueOf(interval.getStartMillis()))
: Bounds.greaterThanOrEqualTo(boundRefKey, String.valueOf(interval.getEndMillis()));
case LESS_THAN:
return isAligned
? Bounds.lessThan(boundRefKey, String.valueOf(interval.getStartMillis()))
: Bounds.lessThan(boundRefKey, String.valueOf(interval.getEndMillis()));
case LESS_THAN_OR_EQUAL:
return Bounds.lessThan(boundRefKey, String.valueOf(interval.getEndMillis()));
default:
throw new IllegalStateException("Shouldn't have got here");
}
}
private static DimFilter getRangeTimeDimFilter(
SqlKind operatorKind,
RangeRefKey rangeRefKey,
Interval interval,
boolean isAligned
)
{
switch (operatorKind) {
case EQUALS:
return isAligned
? Ranges.interval(rangeRefKey, interval)
: Filtration.matchNothing();
case NOT_EQUALS:
return isAligned
? new NotDimFilter(Ranges.interval(rangeRefKey, interval))
: Filtration.matchEverything();
case GREATER_THAN:
return Ranges.greaterThanOrEqualTo(rangeRefKey, interval.getEndMillis());
case GREATER_THAN_OR_EQUAL:
return isAligned
? Ranges.greaterThanOrEqualTo(rangeRefKey, interval.getStartMillis())
: Ranges.greaterThanOrEqualTo(rangeRefKey, interval.getEndMillis());
case LESS_THAN:
return isAligned
? Ranges.lessThan(rangeRefKey, interval.getStartMillis())
: Ranges.lessThan(rangeRefKey, interval.getEndMillis());
case LESS_THAN_OR_EQUAL:
return Ranges.lessThan(rangeRefKey, interval.getEndMillis());
default:
throw new IllegalStateException("Shouldn't have got here");
}
}
}
|
apache/phoenix | 38,108 | phoenix-core-server/src/main/java/org/apache/phoenix/coprocessor/UngroupedAggregateRegionScanner.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.phoenix.coprocessor;
import static org.apache.phoenix.coprocessor.UngroupedAggregateRegionObserver.checkForLocalIndexColumnFamilies;
import static org.apache.phoenix.coprocessor.UngroupedAggregateRegionObserver.deserializeExpressions;
import static org.apache.phoenix.coprocessor.UngroupedAggregateRegionObserver.deserializeTable;
import static org.apache.phoenix.coprocessor.UngroupedAggregateRegionObserver.getBlockingMemstoreSize;
import static org.apache.phoenix.coprocessor.UngroupedAggregateRegionObserver.setIndexAndTransactionProperties;
import static org.apache.phoenix.coprocessorclient.BaseScannerRegionObserverConstants.LOCAL_INDEX_BUILD;
import static org.apache.phoenix.coprocessorclient.BaseScannerRegionObserverConstants.LOCAL_INDEX_BUILD_PROTO;
import static org.apache.phoenix.coprocessorclient.BaseScannerRegionObserverConstants.REPLAY_WRITES;
import static org.apache.phoenix.coprocessorclient.BaseScannerRegionObserverConstants.UPGRADE_DESC_ROW_KEY;
import static org.apache.phoenix.query.QueryConstants.AGG_TIMESTAMP;
import static org.apache.phoenix.query.QueryConstants.SINGLE_COLUMN;
import static org.apache.phoenix.query.QueryConstants.SINGLE_COLUMN_FAMILY;
import static org.apache.phoenix.query.QueryConstants.UNGROUPED_AGG_ROW_KEY;
import static org.apache.phoenix.query.QueryServices.MUTATE_BATCH_SIZE_ATTRIB;
import static org.apache.phoenix.query.QueryServices.MUTATE_BATCH_SIZE_BYTES_ATTRIB;
import static org.apache.phoenix.query.QueryServices.SOURCE_OPERATION_ATTRIB;
import static org.apache.phoenix.schema.PTableImpl.getColumnsToClone;
import static org.apache.phoenix.util.ScanUtil.getPageSizeMsForRegionScanner;
import static org.apache.phoenix.util.ScanUtil.isDummy;
import static org.apache.phoenix.util.WALAnnotationUtil.annotateMutation;
import java.io.IOException;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Set;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellBuilderFactory;
import org.apache.hadoop.hbase.CellBuilderType;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.DoNotRetryIOException;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.coprocessor.ObserverContext;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.regionserver.PhoenixScannerContext;
import org.apache.hadoop.hbase.regionserver.Region;
import org.apache.hadoop.hbase.regionserver.RegionScanner;
import org.apache.hadoop.hbase.regionserver.ScannerContext;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.phoenix.cache.GlobalCache;
import org.apache.phoenix.cache.TenantCache;
import org.apache.phoenix.coprocessorclient.BaseScannerRegionObserverConstants;
import org.apache.phoenix.exception.DataExceedsCapacityException;
import org.apache.phoenix.execute.MutationState;
import org.apache.phoenix.expression.Expression;
import org.apache.phoenix.expression.aggregator.Aggregator;
import org.apache.phoenix.expression.aggregator.Aggregators;
import org.apache.phoenix.expression.aggregator.ServerAggregators;
import org.apache.phoenix.hbase.index.ValueGetter;
import org.apache.phoenix.hbase.index.covered.update.ColumnReference;
import org.apache.phoenix.hbase.index.util.GenericKeyValueBuilder;
import org.apache.phoenix.hbase.index.util.ImmutableBytesPtr;
import org.apache.phoenix.index.IndexMaintainer;
import org.apache.phoenix.index.PhoenixIndexBuilderHelper;
import org.apache.phoenix.index.PhoenixIndexCodec;
import org.apache.phoenix.memory.InsufficientMemoryException;
import org.apache.phoenix.memory.MemoryManager;
import org.apache.phoenix.query.QueryConstants;
import org.apache.phoenix.query.QueryServicesOptions;
import org.apache.phoenix.schema.PColumn;
import org.apache.phoenix.schema.PRow;
import org.apache.phoenix.schema.PTable;
import org.apache.phoenix.schema.PTableImpl;
import org.apache.phoenix.schema.PTableType;
import org.apache.phoenix.schema.RowKeySchema;
import org.apache.phoenix.schema.SortOrder;
import org.apache.phoenix.schema.TableRef;
import org.apache.phoenix.schema.ValueSchema;
import org.apache.phoenix.schema.tuple.EncodedColumnQualiferCellsList;
import org.apache.phoenix.schema.tuple.MultiKeyValueTuple;
import org.apache.phoenix.schema.tuple.PositionBasedMultiKeyValueTuple;
import org.apache.phoenix.schema.tuple.Tuple;
import org.apache.phoenix.schema.types.PBinary;
import org.apache.phoenix.schema.types.PChar;
import org.apache.phoenix.schema.types.PDataType;
import org.apache.phoenix.schema.types.PDouble;
import org.apache.phoenix.schema.types.PFloat;
import org.apache.phoenix.transaction.PhoenixTransactionProvider;
import org.apache.phoenix.transaction.TransactionFactory;
import org.apache.phoenix.util.ByteUtil;
import org.apache.phoenix.util.ClientUtil;
import org.apache.phoenix.util.EncodedColumnsUtil;
import org.apache.phoenix.util.ExpressionUtil;
import org.apache.phoenix.util.IndexUtil;
import org.apache.phoenix.util.LogUtil;
import org.apache.phoenix.util.PhoenixKeyValueUtil;
import org.apache.phoenix.util.ScanUtil;
import org.apache.phoenix.util.ServerUtil;
import org.apache.phoenix.util.StringUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.phoenix.thirdparty.com.google.common.collect.Sets;
import org.apache.phoenix.thirdparty.com.google.common.primitives.Ints;
public class UngroupedAggregateRegionScanner extends BaseRegionScanner {

  private static final Logger LOGGER =
    LoggerFactory.getLogger(UngroupedAggregateRegionScanner.class);

  // Time budget (ms) for a single next() call before returning control to the client.
  private long pageSizeMs;
  // Max number of mutations per server-side commit batch; 0 until needToWrite is decided.
  private int maxBatchSize = 0;
  private Scan scan;
  // The wrapped scanner this aggregate scanner drains rows from.
  private RegionScanner innerScanner;
  private Region region;
  private final UngroupedAggregateRegionObserver ungroupedAggregateRegionObserver;
  private final RegionCoprocessorEnvironment env;
  // True when encoded column qualifiers can be used as positional indexes into the cell list.
  private final boolean useQualifierAsIndex;
  // True when this scan performs server-side writes (delete/upsert/index build/upgrade).
  private boolean needToWrite = false;
  private final Pair<Integer, Integer> minMaxQualifiers;
  // Scratch PK value slots, sized to the projected table's PK column count; reused per row.
  private byte[][] values = null;
  private final PTable.QualifierEncodingScheme encodingScheme;
  // Target table for the DESC-row-key upgrade rewrite (rowKeyOrderOptimizable=true copy).
  private PTable writeToTable = null;
  private PTable projectedTable = null;
  private final boolean isDescRowKeyOrderUpgrade;
  // Row key offset used to skip the region start key for local index scans; 0 otherwise.
  private final int offset;
  private final boolean buildLocalIndex;
  private final List<IndexMaintainer> indexMaintainers;
  private boolean isPKChanging = false;
  // Max timestamp of the scan's time range; used as the mutation timestamp.
  private final long ts;
  private PhoenixTransactionProvider txnProvider = null;
  private final UngroupedAggregateRegionObserver.MutationList indexMutations;
  private boolean isDelete = false;
  private final byte[] replayMutations;
  private boolean isUpsert = false;
  private List<Expression> selectExpressions = null;
  // Column family/qualifier targeted by a DROP COLUMN style delete (mutually exclusive
  // with isDelete).
  private byte[] deleteCQ = null;
  private byte[] deleteCF = null;
  // When set, retroactively insert empty key values for this family/qualifier per row/timestamp.
  private byte[] emptyCF = null;
  private byte[] emptyCQ = null;
  private final byte[] indexUUID;
  private final byte[] txState;
  private final byte[] clientVersionBytes;
  // Memstore size above which commits are throttled to avoid flush storms (see constructor).
  private final long blockingMemStoreSize;
  private long maxBatchSizeBytes = 0L;
  // Table written to for UPSERT SELECT across tables / single-row delete; closed in close().
  private Table targetHTable = null;
  // Tracks whether we incremented the observer's scan reference count, so close() can
  // decrement exactly once.
  private boolean incrScanRefCount = false;
  private byte[] indexMaintainersPtr;
  private boolean useIndexProto;

  /**
   * Single row atomic delete that requires returning result (row) back to client only if the row is
   * successfully deleted by the given thread.
   */
  private boolean isSingleRowDelete = false;
  /**
   * Builds the scanner by decoding all server-side operation attributes from the {@code scan}:
   * DESC row key upgrade, local index build, UPSERT SELECT, DELETE (including single-row atomic
   * delete), dropped-column delete and empty-column backfill. Exactly one (or none) of these
   * modes is activated depending on which attributes are present.
   * @param c Observer context (unused beyond its environment).
   * @param innerScanner The underlying region scanner to drain rows from.
   * @param region The region being scanned.
   * @param scan The client scan carrying the Phoenix operation attributes.
   * @param env Coprocessor environment supplying configuration and connections.
   * @param ungroupedAggregateRegionObserver Observer used for commits and ref counting.
   * @throws IOException if table/expression deserialization or upgrade setup fails.
   * @throws SQLException if Phoenix metadata deserialization fails.
   */
  public UngroupedAggregateRegionScanner(final ObserverContext<RegionCoprocessorEnvironment> c,
    final RegionScanner innerScanner, final Region region, final Scan scan,
    final RegionCoprocessorEnvironment env,
    final UngroupedAggregateRegionObserver ungroupedAggregateRegionObserver)
    throws IOException, SQLException {
    super(innerScanner);
    this.env = env;
    this.region = region;
    this.scan = scan;
    this.ungroupedAggregateRegionObserver = ungroupedAggregateRegionObserver;
    this.innerScanner = innerScanner;
    Configuration conf = env.getConfiguration();
    pageSizeMs = getPageSizeMsForRegionScanner(scan);
    // Mutations produced by this scanner are stamped with the scan's upper time bound.
    ts = scan.getTimeRange().getMax();
    boolean localIndexScan = ScanUtil.isLocalIndex(scan);
    encodingScheme = EncodedColumnsUtil.getQualifierEncodingScheme(scan);
    int offsetToBe = 0;
    if (localIndexScan) {
      /*
       * For local indexes, we need to set an offset on row key expressions to skip the region start
       * key.
       */
      offsetToBe = region.getRegionInfo().getStartKey().length != 0
        ? region.getRegionInfo().getStartKey().length
        : region.getRegionInfo().getEndKey().length;
      ScanUtil.setRowKeyOffset(scan, offsetToBe);
    }
    offset = offsetToBe;
    byte[] descRowKeyTableBytes = scan.getAttribute(UPGRADE_DESC_ROW_KEY);
    isDescRowKeyOrderUpgrade = descRowKeyTableBytes != null;
    if (isDescRowKeyOrderUpgrade) {
      LOGGER.debug("Upgrading row key for " + region.getRegionInfo().getTable().getNameAsString());
      projectedTable = deserializeTable(descRowKeyTableBytes);
      try {
        // Rewrite target: same columns, but with row key order optimization enabled.
        writeToTable =
          PTableImpl.builderWithColumns(projectedTable, getColumnsToClone(projectedTable))
            .setRowKeyOrderOptimizable(true).build();
      } catch (SQLException e) {
        ClientUtil.throwIOException("Upgrade failed", e); // Impossible
      }
      values = new byte[projectedTable.getPKColumns().size()][];
    }
    boolean useProto = false;
    // Prefer the protobuf-serialized local index spec; fall back to the legacy attribute.
    byte[] localIndexBytes = scan.getAttribute(LOCAL_INDEX_BUILD_PROTO);
    useProto = localIndexBytes != null;
    if (localIndexBytes == null) {
      localIndexBytes = scan.getAttribute(LOCAL_INDEX_BUILD);
    }
    indexMaintainers =
      localIndexBytes == null ? null : IndexMaintainer.deserialize(localIndexBytes, useProto);
    indexMutations = localIndexBytes == null
      ? new UngroupedAggregateRegionObserver.MutationList()
      : new UngroupedAggregateRegionObserver.MutationList(1024);
    // NOTE(review): 'transforming' is read but never used in this class — confirm whether the
    // DO_TRANSFORMING attribute is still needed or this is dead code.
    byte[] transforming = scan.getAttribute(BaseScannerRegionObserverConstants.DO_TRANSFORMING);
    replayMutations = scan.getAttribute(REPLAY_WRITES);
    indexUUID = scan.getAttribute(PhoenixIndexCodec.INDEX_UUID);
    txState = scan.getAttribute(BaseScannerRegionObserverConstants.TX_STATE);
    clientVersionBytes = scan.getAttribute(BaseScannerRegionObserverConstants.CLIENT_VERSION);
    if (txState != null) {
      int clientVersion = clientVersionBytes == null
        ? ScanUtil.UNKNOWN_CLIENT_VERSION
        : Bytes.toInt(clientVersionBytes);
      txnProvider = TransactionFactory.getTransactionProvider(txState, clientVersion);
    }
    byte[] upsertSelectTable =
      scan.getAttribute(BaseScannerRegionObserverConstants.UPSERT_SELECT_TABLE);
    if (upsertSelectTable != null) {
      // UPSERT SELECT mode: rows are projected through selectExpressions into projectedTable.
      isUpsert = true;
      projectedTable = deserializeTable(upsertSelectTable);
      // The Connection is a singleton. It MUST NOT be closed.
      targetHTable = ServerUtil.ConnectionFactory
        .getConnection(ServerUtil.ConnectionType.DEFAULT_SERVER_CONNECTION, env)
        .getTable(TableName.valueOf(projectedTable.getPhysicalName().getBytes()));
      selectExpressions = deserializeExpressions(
        scan.getAttribute(BaseScannerRegionObserverConstants.UPSERT_SELECT_EXPRS));
      values = new byte[projectedTable.getPKColumns().size()][];
      isPKChanging =
        ExpressionUtil.isPkPositionChanging(new TableRef(projectedTable), selectExpressions);
    } else {
      byte[] isDeleteAgg = scan.getAttribute(BaseScannerRegionObserverConstants.DELETE_AGG);
      isDelete = isDeleteAgg != null && Bytes.compareTo(PDataType.TRUE_BYTES, isDeleteAgg) == 0;
      byte[] singleRowDelete =
        scan.getAttribute(BaseScannerRegionObserverConstants.SINGLE_ROW_DELETE);
      isSingleRowDelete =
        singleRowDelete != null && Bytes.compareTo(PDataType.TRUE_BYTES, singleRowDelete) == 0;
      if (isSingleRowDelete) {
        // The Connection is a singleton. It MUST NOT be closed.
        targetHTable = ServerUtil.ConnectionFactory
          .getConnection(ServerUtil.ConnectionType.DEFAULT_SERVER_CONNECTION, env)
          .getTable(region.getRegionInfo().getTable());
      }
      if (!isDelete) {
        // Dropped-column delete mode: only relevant when not doing a full row delete.
        deleteCF = scan.getAttribute(BaseScannerRegionObserverConstants.DELETE_CF);
        deleteCQ = scan.getAttribute(BaseScannerRegionObserverConstants.DELETE_CQ);
      }
      emptyCF = scan.getAttribute(BaseScannerRegionObserverConstants.EMPTY_CF);
      emptyCQ = scan.getAttribute(BaseScannerRegionObserverConstants.EMPTY_COLUMN_QUALIFIER);
      if (emptyCF != null && emptyCQ == null) {
        // In case some old version sets EMPTY_CF but not EMPTY_COLUMN_QUALIFIER
        // Not sure if it's really needed, but better safe than sorry
        emptyCQ = QueryConstants.EMPTY_COLUMN_BYTES;
      }
    }
    ColumnReference[] dataColumns = IndexUtil.deserializeDataTableColumnsToJoin(scan);
    useQualifierAsIndex =
      EncodedColumnsUtil.useQualifierAsIndex(EncodedColumnsUtil.getMinMaxQualifiersFromScan(scan));
    /**
     * Slow down the writes if the memstore size more than (hbase.hregion.memstore.block.multiplier
     * - 1) times hbase.hregion.memstore.flush.size bytes. This avoids flush storm to hdfs for cases
     * like index building where reads and write happen to all the table regions in the server.
     */
    blockingMemStoreSize = getBlockingMemstoreSize(region, conf);
    buildLocalIndex = indexMaintainers != null && dataColumns == null && !localIndexScan;
    if (buildLocalIndex) {
      checkForLocalIndexColumnFamilies(region, indexMaintainers);
    }
    if (
      isDescRowKeyOrderUpgrade || isDelete || isUpsert || (deleteCQ != null && deleteCF != null)
        || emptyCF != null || buildLocalIndex
    ) {
      needToWrite = true;
      // UPSERT SELECT into a different table is committed through targetHTable, not this region.
      if (
        (isUpsert && (targetHTable == null
          || !targetHTable.getName().equals(region.getTableDescriptor().getTableName())))
      ) {
        needToWrite = false;
      }
      maxBatchSize =
        conf.getInt(MUTATE_BATCH_SIZE_ATTRIB, QueryServicesOptions.DEFAULT_MUTATE_BATCH_SIZE);
      maxBatchSizeBytes = conf.getLongBytes(MUTATE_BATCH_SIZE_BYTES_ATTRIB,
        QueryServicesOptions.DEFAULT_MUTATE_BATCH_SIZE_BYTES);
    }
    minMaxQualifiers = EncodedColumnsUtil.getMinMaxQualifiersFromScan(scan);
    if (LOGGER.isDebugEnabled()) {
      LOGGER.debug(LogUtil.addCustomAnnotations(
        "Starting ungrouped coprocessor scan " + scan + " " + region.getRegionInfo(),
        ScanUtil.getCustomAnnotations(scan)));
    }
    useIndexProto = true;
    indexMaintainersPtr = scan.getAttribute(PhoenixIndexCodec.INDEX_PROTO_MD);
    // for backward compatibility fall back to look by the old attribute
    if (indexMaintainersPtr == null) {
      indexMaintainersPtr = scan.getAttribute(PhoenixIndexCodec.INDEX_MD);
      useIndexProto = false;
    }
    if (needToWrite) {
      // Keep the region observer aware of in-flight writing scans; paired with close().
      ungroupedAggregateRegionObserver.incrementScansReferenceCount();
      incrScanRefCount = true;
    }
  }
  /** Returns the info of the region this scanner operates on. */
  @Override
  public RegionInfo getRegionInfo() {
    return region.getRegionInfo();
  }
  /** Always {@code false}: this scanner never terminates early due to a filter. */
  @Override
  public boolean isFilterDone() {
    return false;
  }
  /**
   * Releases the scan reference count (if taken in the constructor), closes the target table
   * handle (best effort; failures are logged), and always closes the inner scanner last.
   */
  @Override
  public void close() throws IOException {
    if (needToWrite && incrScanRefCount) {
      ungroupedAggregateRegionObserver.decrementScansReferenceCount();
    }
    try {
      if (targetHTable != null) {
        try {
          // Closes only the Table handle; the shared server Connection stays open.
          targetHTable.close();
        } catch (IOException e) {
          LOGGER.error("Closing table: " + targetHTable + " failed: ", e);
        }
      }
    } finally {
      innerScanner.close();
    }
  }
boolean descRowKeyOrderUpgrade(List<Cell> results, ImmutableBytesWritable ptr,
UngroupedAggregateRegionObserver.MutationList mutations) throws IOException {
Arrays.fill(values, null);
Cell firstKV = results.get(0);
RowKeySchema schema = projectedTable.getRowKeySchema();
int maxOffset = schema.iterator(firstKV.getRowArray(), firstKV.getRowOffset() + offset,
firstKV.getRowLength(), ptr);
for (int i = 0; i < schema.getFieldCount(); i++) {
Boolean hasValue = schema.next(ptr, i, maxOffset);
if (hasValue == null) {
break;
}
ValueSchema.Field field = schema.getField(i);
if (field.getSortOrder() == SortOrder.DESC) {
// Special case for re-writing DESC ARRAY, as the actual byte value needs to change in this
// case
if (field.getDataType().isArrayType()) {
field.getDataType().coerceBytes(ptr, null, field.getDataType(), field.getMaxLength(),
field.getScale(), field.getSortOrder(), field.getMaxLength(), field.getScale(),
field.getSortOrder(), true); // force to use correct separator byte
}
// Special case for re-writing DESC CHAR or DESC BINARY, to force the re-writing of trailing
// space characters
else if (field.getDataType() == PChar.INSTANCE || field.getDataType() == PBinary.INSTANCE) {
int len = ptr.getLength();
while (len > 0 && ptr.get()[ptr.getOffset() + len - 1] == StringUtil.SPACE_UTF8) {
len--;
}
ptr.set(ptr.get(), ptr.getOffset(), len);
// Special case for re-writing DESC FLOAT and DOUBLE, as they're not inverted like they
// should be (PHOENIX-2171)
} else
if (field.getDataType() == PFloat.INSTANCE || field.getDataType() == PDouble.INSTANCE) {
byte[] invertedBytes = SortOrder.invert(ptr.get(), ptr.getOffset(), ptr.getLength());
ptr.set(invertedBytes);
}
} else if (field.getDataType() == PBinary.INSTANCE) {
// Remove trailing space characters so that the setValues call below will replace them
// with the correct zero byte character. Note this is somewhat dangerous as these
// could be legit, but I don't know what the alternative is.
int len = ptr.getLength();
while (len > 0 && ptr.get()[ptr.getOffset() + len - 1] == StringUtil.SPACE_UTF8) {
len--;
}
ptr.set(ptr.get(), ptr.getOffset(), len);
}
values[i] = ptr.copyBytes();
}
writeToTable.newKey(ptr, values);
if (
Bytes.compareTo(firstKV.getRowArray(), firstKV.getRowOffset() + offset,
firstKV.getRowLength(), ptr.get(), ptr.getOffset() + offset, ptr.getLength()) == 0
) {
return false;
}
byte[] newRow = ByteUtil.copyKeyBytesIfNecessary(ptr);
if (offset > 0) { // for local indexes (prepend region start key)
byte[] newRowWithOffset = new byte[offset + newRow.length];
System.arraycopy(firstKV.getRowArray(), firstKV.getRowOffset(), newRowWithOffset, 0, offset);
System.arraycopy(newRow, 0, newRowWithOffset, offset, newRow.length);
newRow = newRowWithOffset;
}
byte[] oldRow =
Bytes.copy(firstKV.getRowArray(), firstKV.getRowOffset(), firstKV.getRowLength());
for (Cell cell : results) {
// Copy existing cell but with new row key
Cell newCell = CellBuilderFactory.create(CellBuilderType.DEEP_COPY).setRow(newRow)
.setFamily(cell.getFamilyArray(), cell.getFamilyOffset(), cell.getFamilyLength())
.setQualifier(cell.getQualifierArray(), cell.getQualifierOffset(),
cell.getQualifierLength())
.setTimestamp(cell.getTimestamp()).setType(cell.getType())
.setValue(cell.getValueArray(), cell.getValueOffset(), cell.getValueLength()).build();
switch (cell.getType()) {
case Put:
// If Put, point delete old Put
Delete del = new Delete(oldRow);
Cell newDelCell = CellBuilderFactory.create(CellBuilderType.DEEP_COPY).setRow(newRow)
.setFamily(cell.getFamilyArray(), cell.getFamilyOffset(), cell.getFamilyLength())
.setQualifier(cell.getQualifierArray(), cell.getQualifierOffset(),
cell.getQualifierLength())
.setTimestamp(cell.getTimestamp()).setType(Cell.Type.Delete)
.setValue(ByteUtil.EMPTY_BYTE_ARRAY, 0, 0).build();
del.add(newDelCell);
mutations.add(del);
Put put = new Put(newRow);
put.add(newCell);
mutations.add(put);
break;
case Delete:
case DeleteColumn:
case DeleteFamily:
case DeleteFamilyVersion:
Delete delete = new Delete(newRow);
delete.add(newCell);
mutations.add(delete);
break;
}
}
return true;
}
  /**
   * Builds local index update mutations for the current data row and buffers them in
   * {@code indexMutations}; the cells are then (re)attached to {@code result} for aggregation.
   * @param result Tuple wrapper to receive the row's cells.
   * @param results Cells of the current data row.
   * @param ptr Scratch pointer for the row key.
   * @throws IOException if index mutation construction fails.
   */
  void buildLocalIndex(Tuple result, List<Cell> results, ImmutableBytesWritable ptr)
    throws IOException {
    for (IndexMaintainer maintainer : indexMaintainers) {
      if (!results.isEmpty()) {
        result.getKey(ptr);
        ValueGetter valueGetter = maintainer
          .createGetterFromKeyValues(ImmutableBytesPtr.copyBytesIfNecessary(ptr), results);
        Put put = maintainer.buildUpdateMutation(GenericKeyValueBuilder.INSTANCE, valueGetter, ptr,
          results.get(0).getTimestamp(), env.getRegion().getRegionInfo().getStartKey(),
          env.getRegion().getRegionInfo().getEndKey(), false);
        if (txnProvider != null) {
          // Transactional tables: stamp the index put as already committed at ts.
          put = txnProvider.markPutAsCommitted(put, ts, ts);
        }
        indexMutations.add(put);
      }
    }
    result.setKeyValues(results);
  }
void deleteRow(List<Cell> results, UngroupedAggregateRegionObserver.MutationList mutations) {
Cell firstKV = results.get(0);
Delete delete =
new Delete(firstKV.getRowArray(), firstKV.getRowOffset(), firstKV.getRowLength(), ts);
if (replayMutations != null) {
delete.setAttribute(REPLAY_WRITES, replayMutations);
}
byte[] sourceOperationBytes = scan.getAttribute(SOURCE_OPERATION_ATTRIB);
if (sourceOperationBytes != null) {
delete.setAttribute(SOURCE_OPERATION_ATTRIB, sourceOperationBytes);
}
if (isSingleRowDelete) {
delete.setAttribute(PhoenixIndexBuilderHelper.RETURN_RESULT,
PhoenixIndexBuilderHelper.RETURN_RESULT_ROW);
}
mutations.add(delete);
}
  /**
   * Queues a delete of the dropped column ({@code deleteCF}/{@code deleteCQ}) for the current
   * row, used by DROP COLUMN processing.
   * @param result Tuple view of the row, used to check column presence when {@code emptyCF} is set.
   * @param results Cells of the row (used for the row key).
   * @param mutations Output list receiving the delete.
   */
  void deleteCForQ(Tuple result, List<Cell> results,
    UngroupedAggregateRegionObserver.MutationList mutations) {
    // No need to search for delete column, since we project only it
    // if no empty key value is being set
    if (emptyCF == null || result.getValue(deleteCF, deleteCQ) != null) {
      Delete delete = new Delete(results.get(0).getRowArray(), results.get(0).getRowOffset(),
        results.get(0).getRowLength());
      delete.addColumns(deleteCF, deleteCQ, ts);
      // TODO: We need to set SOURCE_OPERATION_ATTRIB here also. The control will come here if
      // TODO: we drop a column. We also delete metadata from SYSCAT table for the dropped column
      // TODO: and delete the column. In short, we need to set this attribute for the DM for SYSCAT
      // metadata
      // TODO: and for data table rows.
      mutations.add(delete);
    }
  }
  /**
   * Projects the current source row through {@code selectExpressions} into a row of
   * {@code projectedTable} (UPSERT SELECT executed server-side) and appends the resulting
   * mutations. PK expressions are evaluated first to form the new row key, then non-PK
   * expressions are size-checked, coerced, and set as column values.
   * @param result Current source row.
   * @param ptr Scratch pointer reused for expression evaluation and key building.
   * @param mutations Output list receiving the row's mutations.
   * @throws DataExceedsCapacityException if a value does not fit the target column.
   */
  void upsert(Tuple result, ImmutableBytesWritable ptr,
    UngroupedAggregateRegionObserver.MutationList mutations) {
    Arrays.fill(values, null);
    int bucketNumOffset = 0;
    if (projectedTable.getBucketNum() != null) {
      // Salted table: slot 0 is the salt byte placeholder, filled in by newKey().
      values[0] = new byte[] { 0 };
      bucketNumOffset = 1;
    }
    int i = bucketNumOffset;
    List<PColumn> projectedColumns = projectedTable.getColumns();
    for (; i < projectedTable.getPKColumns().size(); i++) {
      Expression expression = selectExpressions.get(i - bucketNumOffset);
      if (expression.evaluate(result, ptr)) {
        values[i] = ptr.copyBytes();
        // If SortOrder from expression in SELECT doesn't match the
        // column being projected into then invert the bits.
        if (expression.getSortOrder() != projectedColumns.get(i).getSortOrder()) {
          SortOrder.invert(values[i], 0, values[i], 0, values[i].length);
        }
      } else {
        values[i] = ByteUtil.EMPTY_BYTE_ARRAY;
      }
    }
    projectedTable.newKey(ptr, values);
    PRow row = projectedTable.newRow(GenericKeyValueBuilder.INSTANCE, ts, ptr, false);
    // Remaining (non-PK) projected columns become cell values on the new row.
    for (; i < projectedColumns.size(); i++) {
      Expression expression = selectExpressions.get(i - bucketNumOffset);
      if (expression.evaluate(result, ptr)) {
        PColumn column = projectedColumns.get(i);
        if (
          !column.getDataType().isSizeCompatible(ptr, null, expression.getDataType(),
            expression.getSortOrder(), expression.getMaxLength(), expression.getScale(),
            column.getMaxLength(), column.getScale())
        ) {
          throw new DataExceedsCapacityException(column.getDataType(), column.getMaxLength(),
            column.getScale(), column.getName().getString());
        }
        column.getDataType().coerceBytes(ptr, null, expression.getDataType(),
          expression.getMaxLength(), expression.getScale(), expression.getSortOrder(),
          column.getMaxLength(), column.getScale(), column.getSortOrder(),
          projectedTable.rowKeyOrderOptimizable());
        byte[] bytes = ByteUtil.copyKeyBytesIfNecessary(ptr);
        row.setValue(column, bytes);
      }
    }
    for (Mutation mutation : row.toRowMutations()) {
      if (replayMutations != null) {
        mutation.setAttribute(REPLAY_WRITES, replayMutations);
      } else if (txnProvider != null && projectedTable.getType() == PTableType.INDEX) {
        mutation = txnProvider.markPutAsCommitted((Put) mutation, ts, ts);
      }
      mutations.add(mutation);
    }
    // Reset stateful expressions (e.g. sequences) before the next row.
    for (i = 0; i < selectExpressions.size(); i++) {
      selectExpressions.get(i).reset();
    }
  }
void insertEmptyKeyValue(List<Cell> results,
UngroupedAggregateRegionObserver.MutationList mutations) {
Set<Long> timeStamps = Sets.newHashSetWithExpectedSize(results.size());
for (Cell kv : results) {
long kvts = kv.getTimestamp();
if (!timeStamps.contains(kvts)) {
Put put = new Put(kv.getRowArray(), kv.getRowOffset(), kv.getRowLength());
// The value is not dependent on encoding ("x")
put.addColumn(emptyCF, emptyCQ, kvts, QueryConstants.EMPTY_COLUMN_VALUE_BYTES);
mutations.add(put);
timeStamps.add(kvts);
}
}
}
  /** Delegates to {@link #next(List, ScannerContext)}; raw and cooked iteration are identical here. */
  @Override
  public boolean nextRaw(List<Cell> results, ScannerContext scannerContext) throws IOException {
    return next(results, scannerContext);
  }
  /** Convenience overload without a {@link ScannerContext} (no paging limits applied). */
  @Override
  public boolean next(List<Cell> resultsToReturn) throws IOException {
    return next(resultsToReturn, null);
  }
  /**
   * Core loop: drains the inner scanner, applies the configured server-side operation to each
   * row (DESC upgrade, local index build, delete, upsert, dropped-column delete, empty-column
   * backfill), commits mutation batches as they fill, aggregates each row, and finally emits a
   * single aggregate cell (or the atomically-deleted row) into {@code resultsToReturn}.
   * Returns {@code hasMore} from the inner scanner; honors page timeouts via dummy results /
   * {@link PhoenixScannerContext}.
   * @param resultsToReturn Output list for the aggregate (or deleted-row) cells.
   * @param scannerContext May be null; used for paging/timeout signaling.
   * @throws IOException on scan or commit failure (memory/capacity issues are rethrown as
   *           {@link DoNotRetryIOException}).
   */
  @Override
  public boolean next(List<Cell> resultsToReturn, ScannerContext scannerContext)
    throws IOException {
    boolean hasMore;
    Configuration conf = env.getConfiguration();
    final TenantCache tenantCache = GlobalCache.getTenantCache(env, ScanUtil.getTenantId(scan));
    // Memory chunk tracks aggregator allocations against the tenant's memory budget.
    try (MemoryManager.MemoryChunk em = tenantCache.getMemoryManager().allocate(0)) {
      Aggregators aggregators = ServerAggregators
        .deserialize(scan.getAttribute(BaseScannerRegionObserverConstants.AGGREGATORS), conf, em);
      Aggregator[] rowAggregators = aggregators.getAggregators();
      aggregators.reset(rowAggregators);
      Cell lastCell = null;
      boolean hasAny = false;
      ImmutableBytesWritable ptr = new ImmutableBytesWritable();
      Tuple result =
        useQualifierAsIndex ? new PositionBasedMultiKeyValueTuple() : new MultiKeyValueTuple();
      UngroupedAggregateRegionObserver.MutationList mutations =
        new UngroupedAggregateRegionObserver.MutationList();
      if (
        isDescRowKeyOrderUpgrade || isDelete || isUpsert || (deleteCQ != null && deleteCF != null)
          || emptyCF != null || buildLocalIndex
      ) {
        // Writing mode: presize for a full batch plus 10% slack.
        mutations = new UngroupedAggregateRegionObserver.MutationList(
          Ints.saturatedCast(maxBatchSize + maxBatchSize / 10));
      }
      Result atomicSingleRowDeleteResult = null;
      region.startRegionOperation();
      try {
        synchronized (innerScanner) {
          do {
            ungroupedAggregateRegionObserver.checkForRegionClosingOrSplitting();
            List<Cell> results = useQualifierAsIndex
              ? new EncodedColumnQualiferCellsList(minMaxQualifiers.getFirst(),
                minMaxQualifiers.getSecond(), encodingScheme)
              : new ArrayList<Cell>();
            // Results are potentially returned even when the return value of s.next is false
            // since this is an indication of whether or not there are more values after the
            // ones returned
            hasMore = (scannerContext == null)
              ? innerScanner.nextRaw(results)
              : innerScanner.nextRaw(results, scannerContext);
            if (isDummy(results)) {
              if (!hasAny) {
                // No real row aggregated yet: propagate the dummy (page timeout) to the client.
                resultsToReturn.addAll(results);
                return true;
              }
              // we got a page timeout from the lower scanner but hasAny is true which means that
              // we have a valid result which we can return to the client instead of a dummy but we
              // still need to finish the rpc and release the handler
              PhoenixScannerContext.setReturnImmediately(scannerContext);
              break;
            }
            if (!results.isEmpty()) {
              lastCell = results.get(0);
              result.setKeyValues(results);
              // Dispatch to exactly one per-row operation based on the configured mode.
              if (isDescRowKeyOrderUpgrade) {
                if (!descRowKeyOrderUpgrade(results, ptr, mutations)) {
                  continue;
                }
              } else if (buildLocalIndex) {
                buildLocalIndex(result, results, ptr);
              } else if (isDelete) {
                deleteRow(results, mutations);
              } else if (isUpsert) {
                upsert(result, ptr, mutations);
              } else if (deleteCF != null && deleteCQ != null) {
                deleteCForQ(result, results, mutations);
              }
              if (emptyCF != null) {
                /*
                 * If we've specified an emptyCF, then we need to insert an empty key value
                 * "retroactively" for any key value that is visible at the timestamp that the DDL
                 * was issued. Key values that are not visible at this timestamp will not ever be
                 * projected up to scans past this timestamp, so don't need to be considered. We
                 * insert one empty key value per row per timestamp.
                 */
                insertEmptyKeyValue(results, mutations);
              }
              if (
                ServerUtil.readyToCommit(mutations.size(), mutations.byteSize(), maxBatchSize,
                  maxBatchSizeBytes)
              ) {
                if (!isSingleRowDelete) {
                  annotateAndCommit(mutations);
                } else {
                  atomicSingleRowDeleteResult = annotateCommitAndReturnResult(mutations);
                }
              }
              // Commit in batches based on UPSERT_BATCH_SIZE_BYTES_ATTRIB in config
              if (
                ServerUtil.readyToCommit(indexMutations.size(), indexMutations.byteSize(),
                  maxBatchSize, maxBatchSizeBytes)
              ) {
                setIndexAndTransactionProperties(indexMutations, indexUUID, indexMaintainersPtr,
                  txState, clientVersionBytes, useIndexProto);
                ungroupedAggregateRegionObserver.commitBatch(region, indexMutations,
                  blockingMemStoreSize);
                indexMutations.clear();
              }
              aggregators.aggregate(rowAggregators, result);
              hasAny = true;
            }
            if (
              PhoenixScannerContext.isReturnImmediately(scannerContext)
                || PhoenixScannerContext.isTimedOut(scannerContext, pageSizeMs)
            ) {
              // we could have a valid result which we can return to the client instead of a dummy,
              // but we still need to finish the rpc and release the handler
              PhoenixScannerContext.setReturnImmediately(scannerContext);
              break;
            }
          } while (hasMore);
          // Flush any remaining buffered mutations after the scan loop.
          if (!mutations.isEmpty()) {
            if (!isSingleRowDelete) {
              annotateAndCommit(mutations);
            } else {
              atomicSingleRowDeleteResult = annotateCommitAndReturnResult(mutations);
            }
          }
          if (!indexMutations.isEmpty()) {
            ungroupedAggregateRegionObserver.commitBatch(region, indexMutations,
              blockingMemStoreSize);
            indexMutations.clear();
          }
        }
      } catch (InsufficientMemoryException e) {
        throw new DoNotRetryIOException(e);
      } catch (DataExceedsCapacityException e) {
        throw new DoNotRetryIOException(e.getMessage(), e);
      } catch (Throwable e) {
        LOGGER.error("Exception in UngroupedAggregateRegionScanner for region "
          + region.getRegionInfo().getRegionNameAsString(), e);
        throw e;
      } finally {
        region.closeRegionOperation();
      }
      Cell keyValue;
      if (hasAny) {
        final byte[] value;
        if (isSingleRowDelete && atomicSingleRowDeleteResult != null) {
          // Atomic single-row delete: return the deleted row itself, not an aggregate.
          resultsToReturn.addAll(atomicSingleRowDeleteResult.listCells());
          return hasMore;
        } else {
          value = aggregators.toBytes(rowAggregators);
        }
        if (pageSizeMs == Long.MAX_VALUE) {
          byte[] rowKey;
          final boolean isIncompatibleClient =
            ScanUtil.isIncompatibleClientForServerReturnValidRowKey(scan);
          if (!isIncompatibleClient) {
            rowKey = CellUtil.cloneRow(lastCell);
          } else {
            // Paging is not set. To be compatible with older clients, do not set the row key
            rowKey = UNGROUPED_AGG_ROW_KEY;
          }
          keyValue = PhoenixKeyValueUtil.newKeyValue(rowKey, SINGLE_COLUMN_FAMILY, SINGLE_COLUMN,
            AGG_TIMESTAMP, value, 0, value.length);
        } else {
          keyValue = PhoenixKeyValueUtil.newKeyValue(CellUtil.cloneRow(lastCell),
            SINGLE_COLUMN_FAMILY, SINGLE_COLUMN, AGG_TIMESTAMP, value, 0, value.length);
        }
        resultsToReturn.add(keyValue);
      }
      return hasMore;
    }
  }
  /**
   * Annotates the buffered mutations with the scan's metadata attributes, commits them through
   * the region observer, and clears the list for reuse.
   * @param mutations Mutations to commit; emptied on success.
   * @throws IOException if the commit fails.
   */
  private void annotateAndCommit(UngroupedAggregateRegionObserver.MutationList mutations)
    throws IOException {
    annotateMutations(mutations);
    ungroupedAggregateRegionObserver.commit(region, mutations, indexUUID, blockingMemStoreSize,
      indexMaintainersPtr, txState, targetHTable, useIndexProto, isPKChanging, clientVersionBytes);
    mutations.clear();
  }
  /**
   * Similar to {@link #annotateAndCommit(UngroupedAggregateRegionObserver.MutationList)} but only
   * meant for single row atomic delete mutation that requires returning the result if the row is
   * deleted atomically. The mutation list is cleared after the commit.
   * @param mutations Mutation list.
   * @return Result to be returned (the deleted row, per the RETURN_RESULT attribute).
   * @throws IOException If something goes wrong with the operation.
   */
  private Result annotateCommitAndReturnResult(
    UngroupedAggregateRegionObserver.MutationList mutations) throws IOException {
    annotateMutations(mutations);
    Result result = ungroupedAggregateRegionObserver.commitWithResultReturned(mutations, indexUUID,
      indexMaintainersPtr, txState, targetHTable, useIndexProto, clientVersionBytes);
    mutations.clear();
    return result;
  }
  /**
   * Annotate the given mutations as per the scan attributes. External-schema annotations are
   * only applied for delete/upsert operations.
   * @param mutations The mutations that need to be annotated.
   */
  private void annotateMutations(UngroupedAggregateRegionObserver.MutationList mutations) {
    annotateDataMutations(mutations, scan);
    if (isDelete || isUpsert) {
      annotateDataMutationsWithExternalSchemaId(mutations, scan);
    }
  }
  /** Pass-through of the scan's configured max result size. */
  @Override
  public long getMaxResultSize() {
    return scan.getMaxResultSize();
  }
private void annotateDataMutations(UngroupedAggregateRegionObserver.MutationList mutationsList,
Scan scan) {
byte[] tenantId = scan.getAttribute(MutationState.MutationMetadataType.TENANT_ID.toString());
byte[] schemaName =
scan.getAttribute(MutationState.MutationMetadataType.SCHEMA_NAME.toString());
byte[] logicalTableName =
scan.getAttribute(MutationState.MutationMetadataType.LOGICAL_TABLE_NAME.toString());
byte[] tableType = scan.getAttribute(MutationState.MutationMetadataType.TABLE_TYPE.toString());
byte[] ddlTimestamp =
scan.getAttribute(MutationState.MutationMetadataType.TIMESTAMP.toString());
for (Mutation m : mutationsList) {
annotateMutation(m, tenantId, schemaName, logicalTableName, tableType, ddlTimestamp);
}
}
  /**
   * Stamps each mutation with the external schema registry ID carried on the scan (applied
   * only for delete/upsert — see {@code annotateMutations}).
   * @param mutationsList Mutations to annotate.
   * @param scan Scan carrying the EXTERNAL_SCHEMA_ID attribute.
   */
  private void annotateDataMutationsWithExternalSchemaId(
    UngroupedAggregateRegionObserver.MutationList mutationsList, Scan scan) {
    byte[] externalSchemaRegistryId =
      scan.getAttribute(MutationState.MutationMetadataType.EXTERNAL_SCHEMA_ID.toString());
    for (Mutation m : mutationsList) {
      annotateMutation(m, externalSchemaRegistryId);
    }
  }
}
|
apache/maven-resolver | 38,177 | maven-resolver-api/src/main/java/org/eclipse/aether/RepositorySystemSession.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.eclipse.aether;
import java.io.Closeable;
import java.nio.file.Path;
import java.util.Collection;
import java.util.Map;
import java.util.function.Supplier;
import org.eclipse.aether.artifact.ArtifactTypeRegistry;
import org.eclipse.aether.collection.DependencyGraphTransformer;
import org.eclipse.aether.collection.DependencyManager;
import org.eclipse.aether.collection.DependencySelector;
import org.eclipse.aether.collection.DependencyTraverser;
import org.eclipse.aether.collection.VersionFilter;
import org.eclipse.aether.repository.AuthenticationSelector;
import org.eclipse.aether.repository.LocalRepository;
import org.eclipse.aether.repository.LocalRepositoryManager;
import org.eclipse.aether.repository.MirrorSelector;
import org.eclipse.aether.repository.ProxySelector;
import org.eclipse.aether.repository.RemoteRepository;
import org.eclipse.aether.repository.RepositoryPolicy;
import org.eclipse.aether.repository.WorkspaceReader;
import org.eclipse.aether.resolution.ArtifactDescriptorPolicy;
import org.eclipse.aether.resolution.ResolutionErrorPolicy;
import org.eclipse.aether.scope.ScopeManager;
import org.eclipse.aether.scope.SystemDependencyScope;
import org.eclipse.aether.transfer.TransferListener;
/**
* Defines settings and components that control the repository system. Once initialized, the session object itself is
* supposed to be immutable and hence can safely be shared across an entire application and any concurrent threads
* reading it. Components that wish to tweak some aspects of an existing session should use the copy constructor of
* {@link DefaultRepositorySystemSession} and its mutators to derive a custom session.
*
* @noimplement This interface is not intended to be implemented by clients.
* @noextend This interface is not intended to be extended by clients.
*/
public interface RepositorySystemSession {
    /**
     * Immutable session that is closeable and should be handled as a resource (e.g. with
     * try-with-resources). These session instances can be created with {@link SessionBuilder}.
     *
     * @noimplement This interface is not intended to be implemented by clients.
     * @noextend This interface is not intended to be extended by clients.
     *
     * @since 2.0.0
     */
    interface CloseableSession extends RepositorySystemSession, Closeable {
        /**
         * Returns the ID of this closeable session instance. Each closeable session has different ID, unique within
         * the repository system it was created with.
         *
         * @return The session ID that is never {@code null}.
         */
        String sessionId();

        /**
         * Closes the session. The session should be closed by its creator. A closed session should not be used anymore.
         * This method may be invoked multiple times, but close will act only once (first invocation; idempotent).
         */
        @Override
        void close();
    }
/**
* Builder for building {@link CloseableSession} instances. Builder instances can be created with
* {@link RepositorySystem#createSessionBuilder()} method. Instances are not thread-safe nor immutable.
* <p>
* Important: if you set a stateful member on builder (for example {@link SessionData} or {@link RepositoryCache}),
* the builder will create session instances using same provided stateful members, that may lead to unexpected side
* effects. Solution for these cases is to not reuse builder instances, or, keep reconfiguring it, or ultimately
* provide suppliers that create new instance per each call.
*
* @noimplement This interface is not intended to be implemented by clients.
* @noextend This interface is not intended to be extended by clients.
*
* @since 2.0.0
*/
interface SessionBuilder {
/**
* Controls whether the repository system operates in offline mode and avoids/refuses any access to remote
* repositories.
*
* @param offline {@code true} if the repository system is in offline mode, {@code false} otherwise.
* @return This session for chaining, never {@code null}.
*/
SessionBuilder setOffline(boolean offline);
/**
* Controls whether repositories declared in artifact descriptors should be ignored during transitive dependency
* collection. If enabled, only the repositories originally provided with the collect request will be considered.
*
* @param ignoreArtifactDescriptorRepositories {@code true} to ignore additional repositories from artifact
* descriptors, {@code false} to merge those with the originally
* specified repositories.
* @return This session for chaining, never {@code null}.
*/
SessionBuilder setIgnoreArtifactDescriptorRepositories(boolean ignoreArtifactDescriptorRepositories);
/**
* Sets the policy which controls whether resolutions errors from remote repositories should be cached.
*
* @param resolutionErrorPolicy The resolution error policy for this session, may be {@code null} if resolution
* errors should generally not be cached.
* @return This session for chaining, never {@code null}.
*/
SessionBuilder setResolutionErrorPolicy(ResolutionErrorPolicy resolutionErrorPolicy);
/**
* Sets the policy which controls how errors related to reading artifact descriptors should be handled.
*
* @param artifactDescriptorPolicy The descriptor error policy for this session, may be {@code null} if descriptor
* errors should generally not be tolerated.
* @return This session for chaining, never {@code null}.
*/
SessionBuilder setArtifactDescriptorPolicy(ArtifactDescriptorPolicy artifactDescriptorPolicy);
/**
* Sets the global checksum policy. If set, the global checksum policy overrides the checksum policies of the remote
* repositories being used for resolution.
*
* @param checksumPolicy The global checksum policy, may be {@code null}/empty to apply the per-repository policies.
* @return This session for chaining, never {@code null}.
* @see RepositoryPolicy#CHECKSUM_POLICY_FAIL
* @see RepositoryPolicy#CHECKSUM_POLICY_IGNORE
* @see RepositoryPolicy#CHECKSUM_POLICY_WARN
*/
SessionBuilder setChecksumPolicy(String checksumPolicy);
/**
* Sets the global update policy. If set, the global update policy overrides the update policies of the remote
* repositories being used for resolution.
* <p>
* This method is meant for code that does not want to distinguish between artifact and metadata policies.
* Note: applications should either use get/set updatePolicy (this method and
* {@link RepositorySystemSession#getUpdatePolicy()}) or also distinguish between artifact and
* metadata update policies (and use other methods), but <em>should not mix the two!</em>
*
* @param updatePolicy The global update policy, may be {@code null}/empty to apply the per-repository policies.
* @return This session for chaining, never {@code null}.
* @see RepositoryPolicy#UPDATE_POLICY_ALWAYS
* @see RepositoryPolicy#UPDATE_POLICY_DAILY
* @see RepositoryPolicy#UPDATE_POLICY_NEVER
* @see #setArtifactUpdatePolicy(String)
* @see #setMetadataUpdatePolicy(String)
*/
SessionBuilder setUpdatePolicy(String updatePolicy);
/**
* Sets the global artifact update policy. If set, the global update policy overrides the artifact update policies
* of the remote repositories being used for resolution.
*
* @param artifactUpdatePolicy The global update policy, may be {@code null}/empty to apply the per-repository policies.
* @return This session for chaining, never {@code null}.
* @see RepositoryPolicy#UPDATE_POLICY_ALWAYS
* @see RepositoryPolicy#UPDATE_POLICY_DAILY
* @see RepositoryPolicy#UPDATE_POLICY_NEVER
* @since 2.0.0
*/
SessionBuilder setArtifactUpdatePolicy(String artifactUpdatePolicy);
/**
* Sets the global metadata update policy. If set, the global update policy overrides the metadata update policies
* of the remote repositories being used for resolution.
*
* @param metadataUpdatePolicy The global update policy, may be {@code null}/empty to apply the per-repository policies.
* @return This session for chaining, never {@code null}.
* @see RepositoryPolicy#UPDATE_POLICY_ALWAYS
* @see RepositoryPolicy#UPDATE_POLICY_DAILY
* @see RepositoryPolicy#UPDATE_POLICY_NEVER
* @since 2.0.0
*/
SessionBuilder setMetadataUpdatePolicy(String metadataUpdatePolicy);
/**
* Sets the local repository manager used during this session. <em>Note:</em> Eventually, a valid session must have
* a local repository manager set.
* <p>
* The provisioning of {@link org.eclipse.aether.repository.LocalRepositoryManager} for use with this
* method introduces chicken and egg situation. Integrators MUST NOT use this method, but instead, hook into
* Local Repository Manager Provider by any means they can (ie by using Provider or Sisu Components) and use
* custom string and/or priorities instead. This method existence is not meant for "everyday use" (normal
* session creation), but for some more advanced use cases. Do not use it, unless you know what are you doing.
*
* @param localRepositoryManager The local repository manager used during this session, may be {@code null}.
* @return This session for chaining, never {@code null}.
*/
SessionBuilder setLocalRepositoryManager(LocalRepositoryManager localRepositoryManager);
/**
* Sets the workspace reader used during this session. If set, the workspace reader will usually be consulted first
* to resolve artifacts.
*
* @param workspaceReader The workspace reader for this session, may be {@code null} if none.
* @return This session for chaining, never {@code null}.
*/
SessionBuilder setWorkspaceReader(WorkspaceReader workspaceReader);
/**
* Sets the listener being notified of actions in the repository system.
*
* @param repositoryListener The repository listener, may be {@code null} if none.
* @return This session for chaining, never {@code null}.
*/
SessionBuilder setRepositoryListener(RepositoryListener repositoryListener);
/**
* Sets the listener being notified of uploads/downloads by the repository system.
*
* @param transferListener The transfer listener, may be {@code null} if none.
* @return This session for chaining, never {@code null}.
*/
SessionBuilder setTransferListener(TransferListener transferListener);
/**
* Sets the system properties to use, e.g. for processing of artifact descriptors. System properties are usually
* collected from the runtime environment like {@link System#getProperties()} and environment variables.
* <p>
* <em>Note:</em> System properties are of type {@code Map<String, String>} and any key-value pair in the input map
* that doesn't match this type will be silently ignored.
*
* @param systemProperties The system properties, may be {@code null} or empty if none.
* @return This session for chaining, never {@code null}.
*/
SessionBuilder setSystemProperties(Map<?, ?> systemProperties);
/**
* Sets the specified system property.
*
* @param key The property key, must not be {@code null}.
* @param value The property value, may be {@code null} to remove/unset the property.
* @return This session for chaining, never {@code null}.
*/
SessionBuilder setSystemProperty(String key, String value);
/**
* Sets the user properties to use, e.g. for processing of artifact descriptors. User properties are similar to
* system properties but are set on the discretion of the user and hence are considered of higher priority than
* system properties in case of conflicts.
* <p>
* <em>Note:</em> User properties are of type {@code Map<String, String>} and any key-value pair in the input map
* that doesn't match this type will be silently ignored.
*
* @param userProperties The user properties, may be {@code null} or empty if none.
* @return This session for chaining, never {@code null}.
*/
SessionBuilder setUserProperties(Map<?, ?> userProperties);
/**
* Sets the specified user property.
*
* @param key The property key, must not be {@code null}.
* @param value The property value, may be {@code null} to remove/unset the property.
* @return This session for chaining, never {@code null}.
*/
SessionBuilder setUserProperty(String key, String value);
/**
* Sets the configuration properties used to tweak internal aspects of the repository system (e.g. thread pooling,
* connector-specific behavior, etc.).
* <p>
* <em>Note:</em> Configuration properties are of type {@code Map<String, Object>} and any key-value pair in the
* input map that doesn't match this type will be silently ignored.
*
* @param configProperties The configuration properties, may be {@code null} or empty if none.
* @return This session for chaining, never {@code null}.
*/
SessionBuilder setConfigProperties(Map<?, ?> configProperties);
/**
* Sets the specified configuration property.
*
* @param key The property key, must not be {@code null}.
* @param value The property value, may be {@code null} to remove/unset the property.
* @return This session for chaining, never {@code null}.
*/
SessionBuilder setConfigProperty(String key, Object value);
/**
* Sets the mirror selector to use for repositories discovered in artifact descriptors. Note that this selector is
* not used for remote repositories which are passed as request parameters to the repository system, those
* repositories are supposed to denote the effective repositories.
*
* @param mirrorSelector The mirror selector to use, may be {@code null}.
* @return This session for chaining, never {@code null}.
*/
SessionBuilder setMirrorSelector(MirrorSelector mirrorSelector);
/**
* Sets the proxy selector to use for repositories discovered in artifact descriptors. Note that this selector is
* not used for remote repositories which are passed as request parameters to the repository system, those
* repositories are supposed to have their proxy (if any) already set.
*
* @param proxySelector The proxy selector to use, may be {@code null}.
* @return This session for chaining, never {@code null}.
* @see RemoteRepository#getProxy()
*/
SessionBuilder setProxySelector(ProxySelector proxySelector);
/**
* Sets the authentication selector to use for repositories discovered in artifact descriptors. Note that this
* selector is not used for remote repositories which are passed as request parameters to the repository system,
* those repositories are supposed to have their authentication (if any) already set.
*
* @param authenticationSelector The authentication selector to use, may be {@code null}.
* @return This session for chaining, never {@code null}.
* @see RemoteRepository#getAuthentication()
*/
SessionBuilder setAuthenticationSelector(AuthenticationSelector authenticationSelector);
/**
* Sets the registry of artifact types recognized by this session.
*
* @param artifactTypeRegistry The artifact type registry, may be {@code null}.
* @return This session for chaining, never {@code null}.
*/
SessionBuilder setArtifactTypeRegistry(ArtifactTypeRegistry artifactTypeRegistry);
/**
* Sets the dependency traverser to use for building dependency graphs.
*
* @param dependencyTraverser The dependency traverser to use for building dependency graphs, may be {@code null}.
* @return This session for chaining, never {@code null}.
*/
SessionBuilder setDependencyTraverser(DependencyTraverser dependencyTraverser);
/**
* Sets the dependency manager to use for building dependency graphs.
*
* @param dependencyManager The dependency manager to use for building dependency graphs, may be {@code null}.
* @return This session for chaining, never {@code null}.
*/
SessionBuilder setDependencyManager(DependencyManager dependencyManager);
/**
* Sets the dependency selector to use for building dependency graphs.
*
* @param dependencySelector The dependency selector to use for building dependency graphs, may be {@code null}.
* @return This session for chaining, never {@code null}.
*/
SessionBuilder setDependencySelector(DependencySelector dependencySelector);
/**
* Sets the version filter to use for building dependency graphs.
*
* @param versionFilter The version filter to use for building dependency graphs, may be {@code null} to not filter
* versions.
* @return This session for chaining, never {@code null}.
*/
SessionBuilder setVersionFilter(VersionFilter versionFilter);
/**
* Sets the dependency graph transformer to use for building dependency graphs.
*
* @param dependencyGraphTransformer The dependency graph transformer to use for building dependency graphs, may be
* {@code null}.
* @return This session for chaining, never {@code null}.
*/
SessionBuilder setDependencyGraphTransformer(DependencyGraphTransformer dependencyGraphTransformer);
/**
* Sets the custom data associated with this session.
* Note: When this method used to set instance, same passed instance will be used for every built session out
* of this builder instance, hence the built sessions will share these instances as well!
*
* @param data The session data, may be {@code null}.
* @return This session for chaining, never {@code null}.
*/
SessionBuilder setData(SessionData data);
/**
* Sets the cache the repository system may use to save data for future reuse during the session.
* Note: When this method used to set instance, same passed instance will be used for every built session out
* of this builder instance, hence the built sessions will share these instances as well!
*
* @param cache The repository cache, may be {@code null} if none.
* @return This session for chaining, never {@code null}.
*/
SessionBuilder setCache(RepositoryCache cache);
/**
* Sets the scope manager for session, may be {@code null}.
*
* @param scopeManager The scope manager, may be {@code null}.
* @return The session for chaining, never {@code null}.
*/
SessionBuilder setScopeManager(ScopeManager scopeManager);
/**
* Adds on session ended handler to be immediately registered when this builder creates session.
*
* @param handler The on session ended handler, may not be {@code null}.
* @return The session for chaining, never {@code null}.
*/
SessionBuilder addOnSessionEndedHandler(Runnable handler);
/**
* Sets the custom session data supplier associated with this session.
* Note: The supplier will be used for every built session out of this builder instance, so if supplier supplies
* <em>same instance</em> the built sessions will share these instances as well!
*
* @param dataSupplier The session data supplier, may not be {@code null}.
* @return This session for chaining, never {@code null}.
*/
SessionBuilder setSessionDataSupplier(Supplier<SessionData> dataSupplier);
/**
* Sets the cache supplier for the repository system may use to save data for future reuse during the session.
* Note: The supplier will be used for every built session out of this builder instance, so if supplier supplies
* <em>same instance</em> the built sessions will share these instances as well!
*
* @param cacheSupplier The repository cache supplier, may not be {@code null}.
* @return This session for chaining, never {@code null}.
*/
SessionBuilder setRepositoryCacheSupplier(Supplier<RepositoryCache> cacheSupplier);
/**
* Shortcut method to set up local repository manager directly onto builder. There must be at least one non-null
* {@link Path} passed in this method. In case multiple files, session builder will use chained local repository
* manager.
*
* @param baseDirectories The local repository base directories.
* @return This session for chaining, never {@code null}.
* @see #withLocalRepositories(LocalRepository...)
*/
SessionBuilder withLocalRepositoryBaseDirectories(Path... baseDirectories);
/**
* Shortcut method to set up local repository manager directly onto builder. There must be at least one non-null
* {@link Path} present in passed in list. In case multiple files, session builder will use chained local
* repository manager.
*
* @param baseDirectories The local repository base directories.
* @return This session for chaining, never {@code null}.
* @see #withLocalRepositories(Collection)
*/
SessionBuilder withLocalRepositoryBaseDirectories(Collection<Path> baseDirectories);
/**
* Shortcut method to set up local repository manager directly onto builder. There must be at least one non-null
* {@link LocalRepository} passed in this method. In case multiple local repositories, session builder will
* use chained local repository manager.
*
* @param localRepositories The local repositories.
* @return This session for chaining, never {@code null}.
*/
SessionBuilder withLocalRepositories(LocalRepository... localRepositories);
/**
* Shortcut method to set up local repository manager directly onto builder. There must be at least one non-null
* {@link LocalRepository} present in passed in list. In case multiple local repositories, session builder will
* use chained local repository manager.
*
* @param localRepositories The local repositories.
* @return This session for chaining, never {@code null}.
*/
SessionBuilder withLocalRepositories(Collection<LocalRepository> localRepositories);
/**
* Adds the listeners to be notified of actions in the repository system.
*
* @param repositoryListeners The repository listeners, never {@code null}.
* @return This session for chaining, never {@code null}.
*/
SessionBuilder withRepositoryListener(RepositoryListener... repositoryListeners);
/**
* Adds the listeners to be notified of actions in the repository system.
*
* @param repositoryListeners The repository listeners, never {@code null}.
* @return This session for chaining, never {@code null}.
*/
SessionBuilder withRepositoryListener(Collection<RepositoryListener> repositoryListeners);
/**
* Adds the listener to be notified of uploads/downloads by the repository system.
*
* @param transferListeners The transfer listeners, never {@code null}.
* @return This session for chaining, never {@code null}.
*/
SessionBuilder withTransferListener(TransferListener... transferListeners);
/**
* Adds the listener to be notified of uploads/downloads by the repository system.
*
* @param transferListeners The transfer listeners, never {@code null}.
* @return This session for chaining, never {@code null}.
*/
SessionBuilder withTransferListener(Collection<TransferListener> transferListeners);
/**
* Shortcut method to shallow-copy passed in session into current builder.
*
* @param session The session to shallow-copy from.
* @return This session for chaining, never {@code null}.
*/
SessionBuilder withRepositorySystemSession(RepositorySystemSession session);
/**
* Creates immutable closeable session out this builder instance.
*
* @return Immutable closeable session, never {@code null}.
*/
CloseableSession build();
}
/**
* Indicates whether the repository system operates in offline mode and avoids/refuses any access to remote
* repositories.
*
* @return {@code true} if the repository system is in offline mode, {@code false} otherwise.
*/
boolean isOffline();
/**
* Indicates whether repositories declared in artifact descriptors should be ignored during transitive dependency
* collection. If enabled, only the repositories originally provided with the collect request will be considered.
*
* @return {@code true} if additional repositories from artifact descriptors are ignored, {@code false} to merge
* those with the originally specified repositories.
*/
boolean isIgnoreArtifactDescriptorRepositories();
/**
* Gets the policy which controls whether resolutions errors from remote repositories should be cached.
*
* @return The resolution error policy for this session or {@code null} if resolution errors should generally not be
* cached.
*/
ResolutionErrorPolicy getResolutionErrorPolicy();
/**
* Gets the policy which controls how errors related to reading artifact descriptors should be handled.
*
* @return The descriptor error policy for this session or {@code null} if descriptor errors should generally not be
* tolerated.
*/
ArtifactDescriptorPolicy getArtifactDescriptorPolicy();
/**
* Gets the global checksum policy. If set, the global checksum policy overrides the checksum policies of the remote
* repositories being used for resolution.
*
* @return The global checksum policy or {@code null}/empty if not set and the per-repository policies apply.
* @see RepositoryPolicy#CHECKSUM_POLICY_FAIL
* @see RepositoryPolicy#CHECKSUM_POLICY_IGNORE
* @see RepositoryPolicy#CHECKSUM_POLICY_WARN
*/
String getChecksumPolicy();
/**
* Gets the global update policy, or {@code null} if not set.
* <p>
* This method is meant for code that does not want to distinguish between artifact and metadata policies.
* Note: applications should either use get/set updatePolicy (this method and
* {@link DefaultRepositorySystemSession#setUpdatePolicy(String)}) or also distinguish between artifact and
* metadata update policies (and use other methods), but <em>should not mix the two!</em>
*
* @see #getArtifactUpdatePolicy()
* @see #getMetadataUpdatePolicy()
*/
String getUpdatePolicy();
/**
* Gets the global artifact update policy. If set, the global update policy overrides the update policies of the
* remote repositories being used for resolution.
*
* @return The global update policy or {@code null}/empty if not set and the per-repository policies apply.
* @see RepositoryPolicy#UPDATE_POLICY_ALWAYS
* @see RepositoryPolicy#UPDATE_POLICY_DAILY
* @see RepositoryPolicy#UPDATE_POLICY_NEVER
* @since 2.0.0
*/
String getArtifactUpdatePolicy();
/**
* Gets the global metadata update policy. If set, the global update policy overrides the update policies of the remote
* repositories being used for resolution.
*
* @return The global update policy or {@code null}/empty if not set and the per-repository policies apply.
* @see RepositoryPolicy#UPDATE_POLICY_ALWAYS
* @see RepositoryPolicy#UPDATE_POLICY_DAILY
* @see RepositoryPolicy#UPDATE_POLICY_NEVER
* @since 2.0.0
*/
String getMetadataUpdatePolicy();
/**
* Gets the local repository used during this session. This is a convenience method for
* {@link LocalRepositoryManager#getRepository()}.
*
* @return The local repository being during this session, never {@code null}.
*/
LocalRepository getLocalRepository();
/**
* Gets the local repository manager used during this session.
*
* @return The local repository manager used during this session, never {@code null}.
*/
LocalRepositoryManager getLocalRepositoryManager();
/**
* Gets the workspace reader used during this session. If set, the workspace reader will usually be consulted first
* to resolve artifacts.
*
* @return The workspace reader for this session or {@code null} if none.
*/
WorkspaceReader getWorkspaceReader();
/**
* Gets the listener being notified of actions in the repository system.
*
* @return The repository listener or {@code null} if none.
*/
RepositoryListener getRepositoryListener();
/**
* Gets the listener being notified of uploads/downloads by the repository system.
*
* @return The transfer listener or {@code null} if none.
*/
TransferListener getTransferListener();
/**
* Gets the system properties to use, e.g. for processing of artifact descriptors. System properties are usually
* collected from the runtime environment like {@link System#getProperties()} and environment variables.
*
* @return The (read-only) system properties, never {@code null}.
*/
Map<String, String> getSystemProperties();
/**
* Gets the user properties to use, e.g. for processing of artifact descriptors. User properties are similar to
* system properties but are set on the discretion of the user and hence are considered of higher priority than
* system properties.
*
* @return The (read-only) user properties, never {@code null}.
*/
Map<String, String> getUserProperties();
/**
* Gets the configuration properties used to tweak internal aspects of the repository system (e.g. thread pooling,
* connector-specific behavior, etc.)
*
* @return The (read-only) configuration properties, never {@code null}.
* @see ConfigurationProperties
*/
Map<String, Object> getConfigProperties();
/**
* Gets the mirror selector to use for repositories discovered in artifact descriptors. Note that this selector is
* not used for remote repositories which are passed as request parameters to the repository system, those
* repositories are supposed to denote the effective repositories.
*
* @return The mirror selector to use, never {@code null}.
* @see RepositorySystem#newResolutionRepositories(RepositorySystemSession, java.util.List)
*/
MirrorSelector getMirrorSelector();
/**
* Gets the proxy selector to use for repositories discovered in artifact descriptors. Note that this selector is
* not used for remote repositories which are passed as request parameters to the repository system, those
* repositories are supposed to have their proxy (if any) already set.
*
* @return The proxy selector to use, never {@code null}.
* @see org.eclipse.aether.repository.RemoteRepository#getProxy()
* @see RepositorySystem#newResolutionRepositories(RepositorySystemSession, java.util.List)
*/
ProxySelector getProxySelector();
/**
* Gets the authentication selector to use for repositories discovered in artifact descriptors. Note that this
* selector is not used for remote repositories which are passed as request parameters to the repository system,
* those repositories are supposed to have their authentication (if any) already set.
*
* @return The authentication selector to use, never {@code null}.
* @see org.eclipse.aether.repository.RemoteRepository#getAuthentication()
* @see RepositorySystem#newResolutionRepositories(RepositorySystemSession, java.util.List)
*/
AuthenticationSelector getAuthenticationSelector();
/**
* Gets the registry of artifact types recognized by this session, for instance when processing artifact
* descriptors.
*
* @return The artifact type registry, never {@code null}.
*/
ArtifactTypeRegistry getArtifactTypeRegistry();
/**
* Gets the dependency traverser to use for building dependency graphs.
*
* @return The dependency traverser to use for building dependency graphs or {@code null} if dependencies are
* unconditionally traversed.
*/
DependencyTraverser getDependencyTraverser();
/**
* Gets the dependency manager to use for building dependency graphs.
*
* @return The dependency manager to use for building dependency graphs or {@code null} if dependency management is
* not performed.
*/
DependencyManager getDependencyManager();
/**
* Gets the dependency selector to use for building dependency graphs.
*
* @return The dependency selector to use for building dependency graphs or {@code null} if dependencies are
* unconditionally included.
*/
DependencySelector getDependencySelector();
/**
* Gets the version filter to use for building dependency graphs.
*
* @return The version filter to use for building dependency graphs or {@code null} if versions aren't filtered.
*/
VersionFilter getVersionFilter();
/**
* Gets the dependency graph transformer to use for building dependency graphs.
*
* @return The dependency graph transformer to use for building dependency graphs or {@code null} if none.
*/
DependencyGraphTransformer getDependencyGraphTransformer();
/**
* Gets the custom data associated with this session.
*
* @return The session data, never {@code null}.
*/
SessionData getData();
/**
* Gets the cache the repository system may use to save data for future reuse during the session.
*
* @return The repository cache or {@code null} if none.
*/
RepositoryCache getCache();
/**
* Returns the scope manager to be used in this session, may be {@code null} if not set.
*
* @return The scope manager or {@code null} if not set.
* @since 2.0.0
*/
ScopeManager getScopeManager();
/**
* Returns the system dependency scope.
* <p>
* Shorthand method for {@link ScopeManager#getSystemDependencyScope()}.
* <p>
* If {@link ScopeManager} is set, {@link #getScopeManager()} returns non-null value, the result of
* {@link ScopeManager#getSystemDependencyScope()} is returned (that may be {@code null}). If no {@link ScopeManager}
* if set, then {@link SystemDependencyScope#LEGACY} instance is returned, as lack of scope manager means that
* resolver operates in "legacy" mode (Maven3 compatible mode).
*
* @return The system dependency scope or {@code null} if no such scope.
* @since 2.0.0
*/
SystemDependencyScope getSystemDependencyScope();
/**
* Registers a handler to execute when this session closed.
* <p>
* Note: Resolver 1.x sessions will not be able to register handlers. Migrate to Resolver 2.x way of handling
* sessions to make full use of new features. New features (like HTTP/2 transport) depend on this functionality.
* While they will function with Resolver 1.x sessions, they may produce resource leaks.
*
* @param handler the handler, never {@code null}.
* @return {@code true} if handler successfully registered, {@code false} otherwise.
* @since 2.0.0
*/
boolean addOnSessionEndedHandler(Runnable handler);
}
|
apache/rya | 37,768 | extras/indexing/src/main/java/org/apache/rya/indexing/accumulo/freetext/query/QueryParserConstants.java | /* Generated By:JJTree&JavaCC: Do not edit this line. QueryParserConstants.java */
package org.apache.rya.indexing.accumulo.freetext.query;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/**
 * Token literal values and constants for the query parser.
 *
 * <p>Generated by the JavaCC tool (org.javacc.parser.OtherFilesGen#start()) — do not edit by
 * hand; regenerate from the parser's grammar file instead.
 */
public interface QueryParserConstants {
/** End of File. */
int EOF = 0;
/** RegularExpression Id. Boolean AND operator token. */
int AND = 5;
/** RegularExpression Id. Boolean OR operator token. */
int OR = 6;
/** RegularExpression Id. Boolean NOT operator token. */
int NOT = 7;
/** RegularExpression Id. Left parenthesis — opens a grouped sub-expression. */
int LPAREN = 8;
/** RegularExpression Id. Right parenthesis — closes a grouped sub-expression. */
int RPAREN = 9;
/** RegularExpression Id. A quoted phrase. */
int QUOTED = 10;
/** RegularExpression Id. Character class used inside QUOTED (leading underscore marks a private/helper production — confirm against grammar). */
int _QUOTED_CHAR = 11;
/** RegularExpression Id. A bare search term. */
int TERM = 12;
/** RegularExpression Id. A prefix-style term (presumably term plus trailing wildcard — confirm against grammar). */
int PREFIXTERM = 13;
/** RegularExpression Id. A term containing wildcard characters. */
int WILDTERM = 14;
/** RegularExpression Id. Character class used inside TERM (private/helper production). */
int _TERM_CHAR = 15;
/** Lexical state. The only state; all tokens are matched in DEFAULT. */
int DEFAULT = 0;
/** Literal token values, indexed by token id. Ids 1-4 (space, tab, newline, carriage return) have no named constants above. */
String[] tokenImage = {
"<EOF>",
"\" \"",
"\"\\t\"",
"\"\\n\"",
"\"\\r\"",
"<AND>",
"<OR>",
"<NOT>",
"\"(\"",
"\")\"",
"<QUOTED>",
"<_QUOTED_CHAR>",
"<TERM>",
"<PREFIXTERM>",
"<WILDTERM>",
"<_TERM_CHAR>",
};
}
|
apache/juneau | 35,788 | juneau-utest/src/test/java/org/apache/juneau/objecttools/ObjectRest_Test.java | // ***************************************************************************************************************************
// * Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file *
// * distributed with this work for additional information regarding copyright ownership. The ASF licenses this file *
// * to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance *
// * with the License. You may obtain a copy of the License at *
// * *
// * http://www.apache.org/licenses/LICENSE-2.0 *
// * *
// * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an *
// * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the *
// * specific language governing permissions and limitations under the License. *
// ***************************************************************************************************************************
package org.apache.juneau.objecttools;
import static org.apache.juneau.TestUtils.*;
import static org.junit.jupiter.api.Assertions.*;
import java.util.*;
import java.util.function.*;
import org.apache.juneau.*;
import org.apache.juneau.annotation.*;
import org.apache.juneau.collections.*;
import org.apache.juneau.json.*;
import org.apache.juneau.parser.*;
import org.junit.jupiter.api.*;
@SuppressWarnings({"rawtypes","serial"})
class ObjectRest_Test extends TestBase {
//====================================================================================================
// testBasic
//====================================================================================================
@Test void a01_basic() {
	var model = ObjectRest.create(new JsonMap());  // An empty model.

	// PUTs create intermediate nodes along the path.
	model.put("A", new JsonMap());
	model.put("A/B", new JsonMap());
	model.put("A/B/C", "A new string");
	assertEquals("{A:{B:{C:'A new string'}}}", model.toString());

	// POSTs against a list node append to it.
	model.put("A/B/C", new LinkedList());
	model.post("A/B/C", "String #1");
	model.post("A/B/C", "String #2");
	// FIX: removed redundant \' escape from the expected literal (same runtime string,
	// now consistent with the identical literal asserted below).
	assertEquals("{A:{B:{C:['String #1','String #2']}}}", model.toString());

	// GETs address both map keys and list indexes through one path syntax.
	var s = (String) model.get("A/B/C/0");
	assertEquals("String #1", s);
	var m = (Map) model.get("A/B");
	assertEquals("{C:['String #1','String #2']}", m.toString());
}
//====================================================================================================
// testBeans
//====================================================================================================
@Test void b01_beans() throws Exception {
var model = ObjectRest.create(new JsonMap());
// Store a Java bean (with a nested bean array) under a path.
var p = new Person("some name", 123,
new Address("street A", "city A", "state A", 12345, true),
new Address("street B", "city B", "state B", 12345, false)
);
model.put("/person1", p);
// Make sure it got stored correctly.
var serializer = JsonSerializer.create().json5().addRootType().build();
assertEquals("{person1:{name:'some name',age:123,addresses:[{street:'street A',city:'city A',state:'state A',zip:12345,isCurrent:true},{street:'street B',city:'city B',state:'state B',zip:12345,isCurrent:false}]}}", serializer.serialize(model.getRootObject()));
// Get the original Person object back.
p = (Person)model.get("/person1");
assertEquals("city B", p.addresses[1].city);
// Look for deep information inside beans.
var a3 = (Address)model.get("/person1/addresses/1");
assertEquals("city B", a3.city);
// Round-trip: serialize with bean types, parse back, serialize again.
serializer = Json5Serializer.DEFAULT.copy().addBeanTypes().addRootType().build();
p = new Person("some name", 123,
new Address("street A", "city A", "state A", 12345, true),
new Address("street B", "city B", "state B", 12345, false)
);
// Serialize it to JSON.
var s = serializer.serialize(p);
var expectedValue = "{_type:'Person',name:'some name',age:123,addresses:[{street:'street A',city:'city A',state:'state A',zip:12345,isCurrent:true},{street:'street B',city:'city B',state:'state B',zip:12345,isCurrent:false}]}";
assertEquals(expectedValue, s);
// Parse it back to Java objects.
p = (Person)JsonParser.create().beanDictionary(Person.class).build().parse(s, Object.class);
expectedValue = "city B";
s = p.addresses[1].city;
assertEquals(expectedValue, s);
// Parse it back into JSON again.
s = serializer.serialize(p);
expectedValue = "{_type:'Person',name:'some name',age:123,addresses:[{street:'street A',city:'city A',state:'state A',zip:12345,isCurrent:true},{street:'street B',city:'city B',state:'state B',zip:12345,isCurrent:false}]}";
assertEquals(expectedValue, s);
// Try adding an address (POST to the array appends at index 2).
model = ObjectRest.create(p);
model.post("addresses", new Address("street C", "city C", "state C", 12345, true));
s = ((Address)model.get("addresses/2")).toString();
expectedValue = "Address(street=street C,city=city C,state=state C,zip=12345,isCurrent=true)";
assertEquals(expectedValue, s);
// Try replacing addresses
model.put("addresses/0", new Address("street D", "city D", "state D", 12345, false));
model.put("addresses/1", new Address("street E", "city E", "state E", 12345, false));
model.put("addresses/2", new Address("street F", "city F", "state F", 12345, false));
serializer = JsonSerializer.create().json5().build();
s = serializer.serialize(p);
expectedValue = "{name:'some name',age:123,addresses:[{street:'street D',city:'city D',state:'state D',zip:12345,isCurrent:false},{street:'street E',city:'city E',state:'state E',zip:12345,isCurrent:false},{street:'street F',city:'city F',state:'state F',zip:12345,isCurrent:false}]}";
assertEquals(expectedValue, s);
// Try removing an address
model.delete("addresses/1");
s = serializer.serialize(p);
expectedValue = "{name:'some name',age:123,addresses:[{street:'street D',city:'city D',state:'state D',zip:12345,isCurrent:false},{street:'street F',city:'city F',state:'state F',zip:12345,isCurrent:false}]}";
assertEquals(expectedValue, s);
// Deleting index 0 twice removes both remaining entries.
model.delete("addresses/0");
model.delete("addresses/0");
s = serializer.serialize(p);
expectedValue = "{name:'some name',age:123,addresses:[]}";
assertEquals(expectedValue, s);
// Try adding an out-of-bounds address (should pad it with nulls)
model.put("addresses/2", new Address("street A", "city A", "state A", 12345, true));
s = serializer.serialize(p);
expectedValue = "{name:'some name',age:123,addresses:[null,null,{street:'street A',city:'city A',state:'state A',zip:12345,isCurrent:true}]}";
assertEquals(expectedValue, s);
// Try adding an address as a map (should be automatically converted to an Address)
var m = new HashMap<String,Object>();
m.put("street","street D");
m.put("city","city D");
m.put("state","state D");
m.put("zip",Integer.valueOf(12345));
// Note: isCurrent is omitted above, so the converted Address keeps the boolean default (false).
// Try the same for an address in an array.
model.put("addresses/1", m);
s = ((Address)model.get("addresses/1")).toString();
expectedValue = "Address(street=street D,city=city D,state=state D,zip=12345,isCurrent=false)";
assertEquals(expectedValue, s);
// Try setting some fields.
model.put("addresses/1/zip", Integer.valueOf(99999));
s = model.get("addresses/1/zip").toString();
expectedValue = "99999";
assertEquals(expectedValue, s);
// Make sure we can get non-existent branches without throwing any exceptions.
// get() method should just return null.
model = ObjectRest.create(new JsonMap());
var o = model.get("xxx");
assertEquals("null", (""+o));
// Make sure blanks and "/" returns the root object.
s = model.get("").toString();
assertEquals("{}", s);
s = model.get("/").toString();
assertEquals("{}", s);
// Make sure doing a PUT against "" or "/" replaces the root object.
var m2 = JsonMap.ofJson("{x:1}");
model.put("", m2);
s = model.get("").toString();
assertEquals("{x:1}", s);
m2 = JsonMap.ofJson("{x:2}");
model.put("/", m2);
s = model.get("").toString();
assertEquals("{x:2}", s);
// Make sure doing a POST against "" or "/" adds to the root object.
model = ObjectRest.create(new JsonList());
model.post("", Integer.valueOf(1));
model.post("/", Integer.valueOf(2));
s = model.get("").toString();
assertEquals("[1,2]", s);
}
//====================================================================================================
// testAddressBook
//====================================================================================================
@Test void b02_addressBook() {
	// Posting a bean into a root collection should store the instance itself, not a copy.
	var address = new Address("55W. 125th Street", "New York", "NY", 10027, true);
	var person = new Person("Bill Clinton", 65, address);

	var model = ObjectRest.create(new AddressBook());
	model.post("/", person);

	// The exact same object must come back out of index 0.
	assertSame(person, model.get("/0"));
}
/** List-backed address book used as a test fixture. */
public static class AddressBook extends LinkedList<Person> {

	/** Seeds the book with a single well-known entry; returns {@code this} for chaining. */
	public AddressBook init() {
		var home = new Address("55W. 125th Street", "New York", "NY", 10027, true);
		add(new Person("Bill Clinton", 65, home));
		return this;
	}
}
@Bean(p="street,city,state,zip,isCurrent")
public static class Address {

	// Public fields on purpose — the bean framework under test reads/writes them directly.
	public String street;
	public String city;
	public String state;
	public int zip;
	public boolean isCurrent;

	/** No-arg constructor required for parsing. */
	public Address() {}

	/** Convenience constructor populating every field. */
	public Address(String street, String city, String state, int zip, boolean isCurrent) {
		this.street = street;
		this.city = city;
		this.state = state;
		this.zip = zip;
		this.isCurrent = isCurrent;
	}

	@Override /* Object */
	public String toString() {
		// Same rendering as before, built incrementally.
		return new StringBuilder("Address(")
			.append("street=").append(street)
			.append(",city=").append(city)
			.append(",state=").append(state)
			.append(",zip=").append(zip)
			.append(",isCurrent=").append(isCurrent)
			.append(')')
			.toString();
	}
}
@Bean(typeName="Person",p="name,age,addresses")
public static class Person {

	// Public fields on purpose — exercised directly by the bean framework under test.
	public String name;
	public int age;
	public Address[] addresses;

	/** No-arg constructor required for parsing. */
	public Person() {}

	/** Convenience constructor; the address list may be empty. */
	public Person(String name, int age, Address...addresses) {
		this.name = name;
		this.age = age;
		this.addresses = addresses;
	}

	@Override /* Object */
	public String toString() {
		// Identical output to the previous concatenation-based rendering.
		return String.format("Person(name=%s,age=%d)", name, age);
	}
}
//====================================================================================================
// PojoRest(Object,ReaderParser)
//====================================================================================================
@Test void c01_constructors() {
	// Same scenario as the plain factory test, but through the (root, parser) overload.
	var person = new Person("Bill Clinton", 65,
		new Address("55W. 125th Street", "New York", "NY", 10027, true));

	var model = ObjectRest.create(new AddressBook(), JsonParser.DEFAULT);
	model.post("/", person);

	// The stored instance must be returned by identity, not as a copy.
	assertSame(person, model.get("/0"));
}
//====================================================================================================
// setRootLocked()
//====================================================================================================
@Test void d01_rootLocked() {
	var model = ObjectRest.create(new AddressBook()).setRootLocked();
	// Once locked, every spelling of the root path ("", null, "/") must be rejected.
	for (var path : new String[]{"", null, "/"}) {
		assertThrowsWithMessage(ObjectRestException.class, "Cannot overwrite root object", () -> model.put(path, new AddressBook()));
	}
}
//====================================================================================================
// getRootObject()
//====================================================================================================
@Test void e01_getRootObject() {
	var model = ObjectRest.create(new AddressBook());

	// Root starts out as the object the model was created with.
	Object root = model.getRootObject();
	assertTrue(root instanceof AddressBook);

	// A PUT against the empty path swaps the root wholesale...
	model.put("", "foobar");
	root = model.getRootObject();
	assertTrue(root instanceof String);

	// ...and the root can even be nulled out.
	model.put("", null);
	assertNull(model.getRootObject());
}
//====================================================================================================
// get(Class<T> type, String url)
// get(Class<T> type, String url, T def)
// getString(String url)
// getString(String url, String defVal)
// getInt(String url)
// getInt(String url, Integer defVal)
// getLong(String url)
// getLong(String url, Long defVal)
// getBoolean(String url)
// getBoolean(String url, Boolean defVal)
// getMap(String url)
// getMap(String url, Map<?,?> defVal)
// getList(String url)
// getList(String url, List<?> defVal)
// getJsonMap(String url)
// getJsonMap(String url, JsonMap defVal)
// getJsonList(String url)
// getJsonList(String url, JsonList defVal)
//====================================================================================================
@Test void f01_getMethods() throws Exception {
	var model = ObjectRest.create(new A());
	var l = JsonList.ofJson("[{a:'b'}]");
	var m = JsonMap.ofJson("{a:'b'}");

	// ---- Phase 1: uninitialized bean — getters return null (or primitive defaults). ----
	assertMapped(model, ObjectRest::get,
		"f1,f2,f3,f4,f2a,f3a,f4a,f5,f6,f7,f8",
		"<null>,0,0,false,<null>,<null>,<null>,<null>,<null>,<null>,<null>");
	assertMapped(model, (r,p) -> r.getWithDefault(p, "foo"),
		"f1,f2,f3,f4,f2a,f3a,f4a,f5,f6,f7,f8",
		"foo,0,0,false,foo,foo,foo,foo,foo,foo,foo");
	assertMapped(model, ObjectRest::getString,
		"f1,f2,f3,f4,f2a,f3a,f4a,f5,f6,f7,f8",
		"<null>,0,0,false,<null>,<null>,<null>,<null>,<null>,<null>,<null>");
	assertMapped(model, (r,p) -> r.getString(p, "foo"),
		"f1,f2,f3,f4,f2a,f3a,f4a,f5,f6,f7,f8",
		"foo,0,0,false,foo,foo,foo,foo,foo,foo,foo");
	assertMapped(model, ObjectRest::getInt,
		"f1,f2,f3,f4,f2a,f3a,f4a,f5,f6,f7,f8",
		"<null>,0,0,0,<null>,<null>,<null>,<null>,<null>,<null>,<null>");
	assertMapped(model, (r,p) -> r.getInt(p, 1),
		"f1,f2,f3,f4,f2a,f3a,f4a,f5,f6,f7,f8",
		"1,0,0,0,1,1,1,1,1,1,1");
	assertMapped(model, ObjectRest::getLong,
		"f1,f2,f3,f4,f2a,f3a,f4a,f5,f6,f7,f8",
		"<null>,0,0,0,<null>,<null>,<null>,<null>,<null>,<null>,<null>");
	assertMapped(model, (r,p) -> r.getLong(p, 1L),
		"f1,f2,f3,f4,f2a,f3a,f4a,f5,f6,f7,f8",
		"1,0,0,0,1,1,1,1,1,1,1");
	assertMapped(model, ObjectRest::getBoolean,
		"f1,f2,f3,f4,f2a,f3a,f4a,f5,f6,f7,f8",
		"<null>,false,false,false,<null>,<null>,<null>,<null>,<null>,<null>,<null>");
	assertMapped(model, (r,p) -> r.getBoolean(p, true),
		"f1,f2,f3,f4,f2a,f3a,f4a,f5,f6,f7,f8",
		"true,false,false,false,true,true,true,true,true,true,true");

	// getMap() on an uninitialized bean: primitive-typed properties can never convert.
	BiFunction<ObjectRest,String,Object> f1 = (r,p) -> {
		try {
			return r.getMap(p);
		} catch (Exception e) {
			return e.getClass().getSimpleName();
		}
	};
	assertMapped(model, f1,
		"f1,f2,f3,f4,f2a,f3a,f4a,f5,f6,f7,f8",
		"<null>,InvalidDataConversionException,InvalidDataConversionException,InvalidDataConversionException,<null>,<null>,<null>,<null>,<null>,<null>,<null>");
	assertEquals("{a:'b'}", model.getMap("f1", m).toString());
	assertThrows(InvalidDataConversionException.class, ()->model.getMap("f2", m));
	assertThrows(InvalidDataConversionException.class, ()->model.getMap("f3", m));
	assertThrows(InvalidDataConversionException.class, ()->model.getMap("f4", m));
	assertEquals("{a:'b'}", model.getMap("f2a", m).toString());
	assertEquals("{a:'b'}", model.getMap("f3a", m).toString());
	assertEquals("{a:'b'}", model.getMap("f4a", m).toString());
	assertEquals("{a:'b'}", model.getMap("f5", m).toString());
	assertEquals("{a:'b'}", model.getMap("f6", m).toString());
	assertEquals("{a:'b'}", model.getMap("f7", m).toString());
	assertEquals("{a:'b'}", model.getMap("f8", m).toString());
	BiFunction<ObjectRest,String,Object> f2 = (r,p) -> {
		try {
			return r.getMap(p, m);
		} catch (Exception e) {
			return e.getClass().getSimpleName();
		}
	};
	assertMapped(model, f2,
		"f1,f2,f2a,f3,f3a,f4,f4a,f5,f6,f7,f8",
		"{a=b},InvalidDataConversionException,{a=b},InvalidDataConversionException,{a=b},InvalidDataConversionException,{a=b},{a=b},{a=b},{a=b},{a=b}");
	BiFunction<ObjectRest,String,Object> f3 = (r,p) -> {
		try {
			return r.getJsonMap(p);
		} catch (Exception e) {
			return e.getClass().getSimpleName();
		}
	};
	assertMapped(model, f3,
		"f1,f2,f3,f4,f2a,f3a,f4a,f5,f6,f7,f8",
		"<null>,InvalidDataConversionException,InvalidDataConversionException,InvalidDataConversionException,<null>,<null>,<null>,<null>,<null>,<null>,<null>");
	BiFunction<ObjectRest,String,Object> f4 = (r,p) -> {
		try {
			return r.getJsonMap(p, m);
		} catch (Exception e) {
			return e.getClass().getSimpleName();
		}
	};
	assertMapped(model, f4,
		"f1,f2,f3,f4,f2a,f3a,f4a,f5,f6,f7,f8",
		"{a=b},InvalidDataConversionException,InvalidDataConversionException,InvalidDataConversionException,{a=b},{a=b},{a=b},{a=b},{a=b},{a=b},{a=b}");

	// getList()/getJsonList() on the uninitialized bean, with and without defaults.
	assertNull(model.getList("f1"));
	assertThrows(InvalidDataConversionException.class, ()->model.getList("f2"));
	assertThrows(InvalidDataConversionException.class, ()->model.getList("f3"));
	assertThrows(InvalidDataConversionException.class, ()->model.getList("f4"));
	assertNull(model.getList("f2a"));
	assertNull(model.getList("f3a"));
	assertNull(model.getList("f4a"));
	assertNull(model.getList("f5"));
	assertNull(model.getList("f6"));
	assertNull(model.getList("f7"));
	assertNull(model.getList("f8"));
	assertEquals("[{a:'b'}]", model.getList("f1", l).toString());
	assertThrows(InvalidDataConversionException.class, ()->model.getList("f2", l));
	assertThrows(InvalidDataConversionException.class, ()->model.getList("f3", l));
	assertThrows(InvalidDataConversionException.class, ()->model.getList("f4", l));
	assertEquals("[{a:'b'}]", model.getList("f2a", l).toString());
	assertEquals("[{a:'b'}]", model.getList("f3a", l).toString());
	assertEquals("[{a:'b'}]", model.getList("f4a", l).toString());
	assertEquals("[{a:'b'}]", model.getList("f5", l).toString());
	assertEquals("[{a:'b'}]", model.getList("f6", l).toString());
	assertEquals("[{a:'b'}]", model.getList("f7", l).toString());
	assertEquals("[{a:'b'}]", model.getList("f8", l).toString());
	assertNull(model.getJsonList("f1"));
	assertThrows(InvalidDataConversionException.class, ()->model.getJsonList("f2"));
	assertThrows(InvalidDataConversionException.class, ()->model.getJsonList("f3"));
	assertThrows(InvalidDataConversionException.class, ()->model.getJsonList("f4"));
	assertNull(model.getJsonList("f2a"));
	assertNull(model.getJsonList("f3a"));
	assertNull(model.getJsonList("f4a"));
	assertNull(model.getJsonList("f5"));
	assertNull(model.getJsonList("f6"));
	assertNull(model.getJsonList("f7"));
	assertNull(model.getJsonList("f8"));
	assertEquals("[{a:'b'}]", model.getJsonList("f1", l).toString());
	assertThrows(InvalidDataConversionException.class, ()->model.getJsonList("f2", l));
	assertThrows(InvalidDataConversionException.class, ()->model.getJsonList("f3", l));
	assertThrows(InvalidDataConversionException.class, ()->model.getJsonList("f4", l));
	assertEquals("[{a:'b'}]", model.getJsonList("f2a", l).toString());
	assertEquals("[{a:'b'}]", model.getJsonList("f3a", l).toString());
	assertEquals("[{a:'b'}]", model.getJsonList("f4a", l).toString());
	assertEquals("[{a:'b'}]", model.getJsonList("f5", l).toString());
	assertEquals("[{a:'b'}]", model.getJsonList("f6", l).toString());
	assertEquals("[{a:'b'}]", model.getJsonList("f7", l).toString());
	assertEquals("[{a:'b'}]", model.getJsonList("f8", l).toString());

	// ---- Phase 2: populated bean — repeat every getter against real values. ----
	((A)model.getRootObject()).init();
	assertEquals("1", model.get("f1"));
	assertEquals("2", model.get("f2").toString());
	assertEquals("3", model.get("f3").toString());
	assertEquals("true", model.get("f4").toString());
	assertEquals("2", model.get("f2a").toString());
	assertEquals("3", model.get("f3a").toString());
	assertEquals("true", model.get("f4a").toString());
	assertEquals("{f5a:'a'}", model.get("f5").toString());
	assertEquals("[{f6a:'a'}]", model.get("f6").toString());
	assertEquals("{f5a:'a'}", model.get("f7").toString());
	assertEquals("[{f6a:'a'}]", model.get("f8").toString());
	assertEquals("1", model.getWithDefault("f1", "foo"));
	assertEquals("2", model.getWithDefault("f2", "foo").toString());
	assertEquals("3", model.getWithDefault("f3", "foo").toString());
	assertEquals("true", model.getWithDefault("f4", "foo").toString());
	assertEquals("2", model.getWithDefault("f2a", "foo").toString());
	assertEquals("3", model.getWithDefault("f3a", "foo").toString());
	assertEquals("true", model.getWithDefault("f4a", "foo").toString());
	assertEquals("{f5a:'a'}", model.getWithDefault("f5", "foo").toString());
	assertEquals("[{f6a:'a'}]", model.getWithDefault("f6", "foo").toString());
	assertEquals("{f5a:'a'}", model.getWithDefault("f7", "foo").toString());
	assertEquals("[{f6a:'a'}]", model.getWithDefault("f8", "foo").toString());
	assertEquals("1", model.getString("f1"));
	assertEquals("2", model.getString("f2"));
	assertEquals("3", model.getString("f3"));
	assertEquals("true", model.getString("f4"));
	assertEquals("2", model.getString("f2a"));
	assertEquals("3", model.getString("f3a"));
	assertEquals("true", model.getString("f4a"));
	assertEquals("{f5a:'a'}", model.getString("f5"));
	assertEquals("[{f6a:'a'}]", model.getString("f6"));
	assertEquals("{f5a:'a'}", model.getString("f7"));
	assertEquals("[{f6a:'a'}]", model.getString("f8"));
	assertEquals("1", model.getString("f1", "foo"));
	assertEquals("2", model.getString("f2", "foo"));
	assertEquals("3", model.getString("f3", "foo"));
	assertEquals("true", model.getString("f4", "foo"));
	assertEquals("2", model.getString("f2a", "foo"));
	assertEquals("3", model.getString("f3a", "foo"));
	assertEquals("true", model.getString("f4a", "foo"));
	assertEquals("{f5a:'a'}", model.getString("f5", "foo"));
	assertEquals("[{f6a:'a'}]", model.getString("f6", "foo"));
	assertEquals("{f5a:'a'}", model.getString("f7", "foo"));
	assertEquals("[{f6a:'a'}]", model.getString("f8", "foo"));
	assertEquals(1, (int)model.getInt("f1"));
	assertEquals(2, (int)model.getInt("f2"));
	assertEquals(3, (int)model.getInt("f3"));
	assertEquals(1, (int)model.getInt("f4"));
	assertEquals(2, (int)model.getInt("f2a"));
	assertEquals(3, (int)model.getInt("f3a"));
	assertEquals(1, (int)model.getInt("f4a"));
	assertThrows(InvalidDataConversionException.class, ()->model.getInt("f5"));
	assertThrows(InvalidDataConversionException.class, ()->model.getInt("f6"));
	assertThrows(InvalidDataConversionException.class, ()->model.getInt("f7"));
	assertThrows(InvalidDataConversionException.class, ()->model.getInt("f8"));
	assertEquals(1, (int)model.getInt("f1", 9));
	assertEquals(2, (int)model.getInt("f2", 9));
	assertEquals(3, (int)model.getInt("f3", 9));
	assertEquals(1, (int)model.getInt("f4", 9));
	assertEquals(2, (int)model.getInt("f2a", 9));
	assertEquals(3, (int)model.getInt("f3a", 9));
	assertEquals(1, (int)model.getInt("f4a", 9));
	assertThrows(InvalidDataConversionException.class, ()->model.getInt("f5", 9));
	assertThrows(InvalidDataConversionException.class, ()->model.getInt("f6", 9));
	assertThrows(InvalidDataConversionException.class, ()->model.getInt("f7", 9));
	assertThrows(InvalidDataConversionException.class, ()->model.getInt("f8", 9));
	assertEquals(1, (long)model.getLong("f1"));
	assertEquals(2, (long)model.getLong("f2"));
	assertEquals(3, (long)model.getLong("f3"));
	assertEquals(1, (long)model.getLong("f4"));
	assertEquals(2, (long)model.getLong("f2a"));
	assertEquals(3, (long)model.getLong("f3a"));
	assertEquals(1, (long)model.getLong("f4a"));
	assertThrows(InvalidDataConversionException.class, ()->model.getLong("f5"));
	assertThrows(InvalidDataConversionException.class, ()->model.getLong("f6"));
	assertThrows(InvalidDataConversionException.class, ()->model.getLong("f7"));
	// FIX: was model.getInt("f8") — copy/paste error; this section exercises getLong().
	assertThrows(InvalidDataConversionException.class, ()->model.getLong("f8"));
	assertEquals(1, (long)model.getLong("f1", 9L));
	assertEquals(2, (long)model.getLong("f2", 9L));
	assertEquals(3, (long)model.getLong("f3", 9L));
	assertEquals(1, (long)model.getLong("f4", 9L));
	assertEquals(2, (long)model.getLong("f2a", 9L));
	assertEquals(3, (long)model.getLong("f3a", 9L));
	assertEquals(1, (long)model.getLong("f4a", 9L));
	assertThrows(InvalidDataConversionException.class, ()->model.getLong("f5", 9L));
	assertThrows(InvalidDataConversionException.class, ()->model.getLong("f6", 9L));
	assertThrows(InvalidDataConversionException.class, ()->model.getLong("f7", 9L));
	assertThrows(InvalidDataConversionException.class, ()->model.getLong("f8", 9L));
	assertEquals(false, model.getBoolean("f1")); // String "1" equates to false.
	assertEquals(true, model.getBoolean("f2"));
	assertEquals(true, model.getBoolean("f3"));
	assertEquals(true, model.getBoolean("f4"));
	assertEquals(true, model.getBoolean("f2a"));
	assertEquals(true, model.getBoolean("f3a"));
	assertEquals(true, model.getBoolean("f4a"));
	assertEquals(false, model.getBoolean("f5")); // "{a:'b'}" equates to false.
	assertEquals(false, model.getBoolean("f6"));
	assertEquals(false, model.getBoolean("f7"));
	assertEquals(false, model.getBoolean("f8"));
	assertEquals(false, model.getBoolean("f1", true)); // String "1" equates to false.
	assertEquals(true, model.getBoolean("f2", true));
	assertEquals(true, model.getBoolean("f3", true));
	assertEquals(true, model.getBoolean("f4", true));
	assertEquals(true, model.getBoolean("f2a", true));
	assertEquals(true, model.getBoolean("f3a", true));
	assertEquals(true, model.getBoolean("f4a", true));
	assertEquals(false, model.getBoolean("f5", true)); // "{a:'b'}" equates to false.
	assertEquals(false, model.getBoolean("f6", true));
	assertEquals(false, model.getBoolean("f7", true));
	assertEquals(false, model.getBoolean("f8", true));
	assertThrows(InvalidDataConversionException.class, ()->model.getMap("f1"));
	assertThrows(InvalidDataConversionException.class, ()->model.getMap("f2"));
	assertThrows(InvalidDataConversionException.class, ()->model.getMap("f3"));
	assertThrows(InvalidDataConversionException.class, ()->model.getMap("f4"));
	assertThrows(InvalidDataConversionException.class, ()->model.getMap("f2a"));
	assertThrows(InvalidDataConversionException.class, ()->model.getMap("f3a"));
	assertThrows(InvalidDataConversionException.class, ()->model.getMap("f4a"));
	assertEquals("{f5a:'a'}", model.getMap("f5").toString());
	assertThrows(InvalidDataConversionException.class, ()->model.getMap("f6"));
	assertEquals("{f5a:'a'}", model.getMap("f7").toString());
	assertThrows(InvalidDataConversionException.class, ()->model.getMap("f8"));
	assertThrows(InvalidDataConversionException.class, ()->model.getMap("f1", m));
	assertThrows(InvalidDataConversionException.class, ()->model.getMap("f2", m));
	assertThrows(InvalidDataConversionException.class, ()->model.getMap("f3", m));
	assertThrows(InvalidDataConversionException.class, ()->model.getMap("f4", m));
	assertThrows(InvalidDataConversionException.class, ()->model.getMap("f2a", m));
	assertThrows(InvalidDataConversionException.class, ()->model.getMap("f3a", m));
	assertThrows(InvalidDataConversionException.class, ()->model.getMap("f4a", m));
	assertEquals("{f5a:'a'}", model.getMap("f5", m).toString());
	assertThrows(InvalidDataConversionException.class, ()->model.getMap("f6", m));
	assertEquals("{f5a:'a'}", model.getMap("f7", m).toString());
	assertThrows(InvalidDataConversionException.class, ()->model.getMap("f8", m));
	assertThrows(InvalidDataConversionException.class, ()->model.getJsonMap("f1"));
	assertThrows(InvalidDataConversionException.class, ()->model.getJsonMap("f2"));
	assertThrows(InvalidDataConversionException.class, ()->model.getJsonMap("f3"));
	assertThrows(InvalidDataConversionException.class, ()->model.getJsonMap("f4"));
	assertThrows(InvalidDataConversionException.class, ()->model.getJsonMap("f2a"));
	assertThrows(InvalidDataConversionException.class, ()->model.getJsonMap("f3a"));
	assertThrows(InvalidDataConversionException.class, ()->model.getJsonMap("f4a"));
	assertEquals("{f5a:'a'}", model.getJsonMap("f5").toString());
	assertThrows(InvalidDataConversionException.class, ()->model.getJsonMap("f6"));
	assertEquals("{f5a:'a'}", model.getJsonMap("f7").toString());
	assertThrows(InvalidDataConversionException.class, ()->model.getJsonMap("f8"));
	assertThrows(InvalidDataConversionException.class, ()->model.getJsonMap("f1", m));
	assertThrows(InvalidDataConversionException.class, ()->model.getJsonMap("f2", m));
	assertThrows(InvalidDataConversionException.class, ()->model.getJsonMap("f3", m));
	assertThrows(InvalidDataConversionException.class, ()->model.getJsonMap("f4", m));
	assertThrows(InvalidDataConversionException.class, ()->model.getJsonMap("f2a", m));
	assertThrows(InvalidDataConversionException.class, ()->model.getJsonMap("f3a", m));
	assertThrows(InvalidDataConversionException.class, ()->model.getJsonMap("f4a", m));
	assertEquals("{f5a:'a'}", model.getJsonMap("f5", m).toString());
	assertThrows(InvalidDataConversionException.class, ()->model.getJsonMap("f6", m));
	assertEquals("{f5a:'a'}", model.getJsonMap("f7", m).toString());
	assertThrows(InvalidDataConversionException.class, ()->model.getJsonMap("f8", m));
	assertThrows(InvalidDataConversionException.class, ()->model.getList("f1"));
	assertThrows(InvalidDataConversionException.class, ()->model.getList("f2"));
	assertThrows(InvalidDataConversionException.class, ()->model.getList("f3"));
	assertThrows(InvalidDataConversionException.class, ()->model.getList("f4"));
	assertThrows(InvalidDataConversionException.class, ()->model.getList("f2a"));
	assertThrows(InvalidDataConversionException.class, ()->model.getList("f3a"));
	assertThrows(InvalidDataConversionException.class, ()->model.getList("f4a"));
	assertEquals("[{f5a:'a'}]", model.getList("f5").toString());
	assertEquals("[{f6a:'a'}]", model.getList("f6").toString());
	assertEquals("[{f5a:'a'}]", model.getList("f7").toString());
	assertEquals("[{f6a:'a'}]", model.getList("f8").toString());
	assertThrows(InvalidDataConversionException.class, ()->model.getList("f1", l));
	assertThrows(InvalidDataConversionException.class, ()->model.getList("f2", l));
	assertThrows(InvalidDataConversionException.class, ()->model.getList("f3", l));
	assertThrows(InvalidDataConversionException.class, ()->model.getList("f4", l));
	assertThrows(InvalidDataConversionException.class, ()->model.getList("f2a", l));
	assertThrows(InvalidDataConversionException.class, ()->model.getList("f3a", l));
	assertThrows(InvalidDataConversionException.class, ()->model.getList("f4a", l));
	assertEquals("[{f5a:'a'}]", model.getList("f5", l).toString());
	assertEquals("[{f6a:'a'}]", model.getList("f6", l).toString());
	assertEquals("[{f5a:'a'}]", model.getList("f7", l).toString());
	assertEquals("[{f6a:'a'}]", model.getList("f8", l).toString());
	assertThrows(InvalidDataConversionException.class, ()->model.getJsonList("f1"));
	assertThrows(InvalidDataConversionException.class, ()->model.getJsonList("f2"));
	assertThrows(InvalidDataConversionException.class, ()->model.getJsonList("f3"));
	assertThrows(InvalidDataConversionException.class, ()->model.getJsonList("f4"));
	assertThrows(InvalidDataConversionException.class, ()->model.getJsonList("f2a"));
	assertThrows(InvalidDataConversionException.class, ()->model.getJsonList("f3a"));
	assertThrows(InvalidDataConversionException.class, ()->model.getJsonList("f4a"));
	assertEquals("[{f5a:'a'}]", model.getJsonList("f5").toString());
	assertEquals("[{f6a:'a'}]", model.getJsonList("f6").toString());
	assertEquals("[{f5a:'a'}]", model.getJsonList("f7").toString());
	assertEquals("[{f6a:'a'}]", model.getJsonList("f8").toString());
	assertThrows(InvalidDataConversionException.class, ()->model.getJsonList("f1", l));
	assertThrows(InvalidDataConversionException.class, ()->model.getJsonList("f2", l));
	assertThrows(InvalidDataConversionException.class, ()->model.getJsonList("f3", l));
	assertThrows(InvalidDataConversionException.class, ()->model.getJsonList("f4", l));
	assertThrows(InvalidDataConversionException.class, ()->model.getJsonList("f2a", l));
	assertThrows(InvalidDataConversionException.class, ()->model.getJsonList("f3a", l));
	assertThrows(InvalidDataConversionException.class, ()->model.getJsonList("f4a", l));
	assertEquals("[{f5a:'a'}]", model.getJsonList("f5", l).toString());
	assertEquals("[{f6a:'a'}]", model.getJsonList("f6", l).toString());
	assertEquals("[{f5a:'a'}]", model.getJsonList("f7", l).toString());
	assertEquals("[{f6a:'a'}]", model.getJsonList("f8", l).toString());
}
public static class A {
public String f1;
public int f2;
public long f3;
public boolean f4;
public Integer f2a;
public Long f3a;
public Boolean f4a;
public Map f5;
public List f6;
public JsonMap f7;
public JsonList f8;
public A init() {
f1 = "1";
f2 = 2;
f3 = 3L;
f4 = true;
f2a = 2;
f3a = 3L;
f4a = true;
try {
f5 = JsonMap.ofJson("{f5a:'a'}");
f6 = JsonList.ofJson("[{f6a:'a'}]");
f7 = JsonMap.ofJson("{f5a:'a'}");
f8 = JsonList.ofJson("[{f6a:'a'}]");
} catch (ParseException e) {
throw new RuntimeException(e);
}
return this;
}
}
//====================================================================================================
// invokeMethod(String url, String method, String args)
//====================================================================================================
@Test void f02_invokeMethod() throws Exception {
var model = ObjectRest.create(new AddressBook().init());
assertEquals("Person(name=Bill Clinton,age=65)", model.invokeMethod("0", "toString", ""));
model = ObjectRest.create(new AddressBook().init(), JsonParser.DEFAULT);
assertEquals("Person(name=Bill Clinton,age=65)", model.invokeMethod("0", "toString", ""));
assertEquals("NY", model.invokeMethod("0/addresses/0/state", "toString", ""));
assertNull(model.invokeMethod("1", "toString", ""));
}
//====================================================================================================
// getPublicMethods(String url)
//====================================================================================================
@Test void f03_getPublicMethods() {
var model = ObjectRest.create(new AddressBook().init());
assertTrue(Json5Serializer.DEFAULT.toString(model.getPublicMethods("0")).contains("'toString'"));
assertTrue(Json5Serializer.DEFAULT.toString(model.getPublicMethods("0/addresses/0/state")).contains("'toString'"));
assertNull(model.getPublicMethods("1"));
}
//====================================================================================================
// getClassMeta(String url)
//====================================================================================================
@Test void f04_getClassMeta() {
var model = ObjectRest.create(new AddressBook().init());
assertEquals("Person", model.getClassMeta("0").getInnerClass().getSimpleName());
assertEquals("String", model.getClassMeta("0/addresses/0/state").getInnerClass().getSimpleName());
assertNull(model.getClassMeta("1"));
assertNull(model.getClassMeta("0/addresses/1/state"));
}
} |
apache/incubator-hugegraph | 38,020 | hugegraph-pd/hg-pd-core/src/main/java/org/apache/hugegraph/pd/TaskScheduleService.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hugegraph.pd;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.PriorityQueue;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.Collectors;
import org.apache.hugegraph.pd.common.KVPair;
import org.apache.hugegraph.pd.common.PDException;
import org.apache.hugegraph.pd.config.PDConfig;
import org.apache.hugegraph.pd.grpc.MetaTask;
import org.apache.hugegraph.pd.grpc.Metapb;
import org.apache.hugegraph.pd.grpc.Pdpb;
import org.apache.hugegraph.pd.meta.TaskInfoMeta;
import org.apache.hugegraph.pd.raft.RaftEngine;
import lombok.extern.slf4j.Slf4j;
/**
 * The task scheduling service checks the status of stores, resources, and partitions on a
 * regular basis, migrates data in a timely manner, and corrects errors on the affected nodes:
 * 1. Monitor whether a store has gone offline
 * 2. Check whether each partition's replicas are correct
 * 3. Check whether each partition's working mode is correct
 * 4. Monitor whether a partition needs to be split and whether the split has completed
 */
@Slf4j
public class TaskScheduleService {
private static final String BALANCE_SHARD_KEY = "BALANCE_SHARD_KEY";
// The dynamic balancing can only be carried out after the machine is offline for 30 minutes
private final long TurnOffAndBalanceInterval = 30 * 60 * 1000;
// leader balances the time interval
private final long BalanceLeaderInterval = 30 * 1000;
private final PDConfig pdConfig;
private final long clusterStartTime; //
private final StoreNodeService storeService;
private final PartitionService partitionService;
private final ScheduledExecutorService executor;
private final TaskInfoMeta taskInfoMeta;
private final StoreMonitorDataService storeMonitorDataService;
private final KvService kvService;
private final LogService logService;
private final Comparator<KVPair<Long, Integer>> kvPairComparatorAsc = (o1, o2) -> {
if (o1.getValue() == o2.getValue()) {
return o1.getKey().compareTo(o2.getKey());
}
return o1.getValue().compareTo(o2.getValue());
};
private final Comparator<KVPair<Long, Integer>> kvPairComparatorDesc = (o1, o2) -> {
if (o1.getValue() == o2.getValue()) {
return o2.getKey().compareTo(o1.getKey());
}
return o2.getValue().compareTo(o1.getValue());
};
private long lastStoreTurnoffTime = 0;
private long lastBalanceLeaderTime = 0;
    /**
     * Creates the schedule service and its metadata/log/kv helpers.
     * Background jobs are not started until {@link #init()} is called.
     *
     * @param config           PD server configuration
     * @param storeService     store node management service
     * @param partitionService partition management service
     */
    public TaskScheduleService(PDConfig config, StoreNodeService storeService,
                               PartitionService partitionService) {
        this.pdConfig = config;
        this.storeService = storeService;
        this.partitionService = partitionService;
        this.taskInfoMeta = new TaskInfoMeta(config);
        this.logService = new LogService(pdConfig);
        this.storeMonitorDataService = new StoreMonitorDataService(pdConfig);
        // Recorded so patrolStores() will not tombstone stores right after startup.
        this.clusterStartTime = System.currentTimeMillis();
        this.kvService = new KvService(pdConfig);
        this.executor = new ScheduledThreadPoolExecutor(16);
    }
    /**
     * Registers all periodic background jobs and the store-status listener:
     * store patrol (60s), partition patrol plus leader/shard balancing (patrol interval),
     * KV TTL cleanup (1s, leader only), quota check (30s, leader only), and an optional
     * expired-monitor-data cleanup (10 min).
     */
    public void init() {
        // Patrol store liveness once a minute; catch Throwable so an error
        // never cancels the recurring schedule.
        executor.scheduleWithFixedDelay(() -> {
            try {
                patrolStores();
            } catch (Throwable e) {
                log.error("patrolStores exception: ", e);
            }
        }, 60, 60, TimeUnit.SECONDS);
        // Patrol partitions and trigger both balancing passes at the configured interval.
        executor.scheduleWithFixedDelay(() -> {
            try {
                patrolPartitions();
                balancePartitionLeader(false);
                balancePartitionShard();
            } catch (Throwable e) {
                log.error("patrolPartitions exception: ", e);
            }
        }, pdConfig.getPatrolInterval(), pdConfig.getPatrolInterval(), TimeUnit.SECONDS);
        // Expire TTL'd KV entries every second; only the raft leader may mutate state.
        executor.scheduleWithFixedDelay(() -> {
            if (isLeader()) {
                kvService.clearTTLData();
            }
        }, 1000, 1000, TimeUnit.MILLISECONDS);
        executor.scheduleWithFixedDelay(
                () -> {
                    if (isLeader()) {
                        storeService.getQuotaChecker();
                    }
                }, 2, 30,
                TimeUnit.SECONDS);
        // clean expired monitor data each 10 minutes, delay 3min.
        // NOTE(review): isLeader() is evaluated once here at init time, unlike the jobs
        // above which re-check inside the lambda; a node that becomes leader later never
        // schedules this cleanup — confirm this is intended.
        if (isLeader() && this.pdConfig.getStore().isMonitorDataEnabled()) {
            executor.scheduleAtFixedRate(() -> {
                // Retention cutoff is in epoch seconds.
                Long expTill = System.currentTimeMillis() / 1000 -
                               this.pdConfig.getStore().getRetentionPeriod();
                log.debug("monitor data keys before " + expTill + " will be deleted");
                int records = 0;
                try {
                    for (Metapb.Store store : storeService.getStores()) {
                        int cnt =
                                this.storeMonitorDataService.removeExpiredMonitorData(store.getId(),
                                                                                      expTill);
                        log.debug("store id :{}, records:{}", store.getId(), cnt);
                        records += cnt;
                    }
                } catch (PDException e) {
                    throw new RuntimeException(e);
                }
                log.debug(String.format("%d records has been deleted", records));
            }, 180, 600, TimeUnit.SECONDS);
        }
        // React to store state transitions: remember Tombstone times (used to gate shard
        // balancing) and re-balance leaders shortly after a store comes back Up.
        storeService.addStatusListener(new StoreStatusListener() {
            @Override
            public void onStoreStatusChanged(Metapb.Store store, Metapb.StoreState old,
                                             Metapb.StoreState status) {
                if (status == Metapb.StoreState.Tombstone) {
                    lastStoreTurnoffTime = System.currentTimeMillis();
                }
                if (status == Metapb.StoreState.Up) {
                    // Delay one BalanceLeaderInterval so the store is fully settled.
                    executor.schedule(() -> {
                        try {
                            balancePartitionLeader(false);
                        } catch (PDException e) {
                            log.error("exception {}", e);
                        }
                    }, BalanceLeaderInterval, TimeUnit.MILLISECONDS);
                }
            }

            @Override
            public void onGraphChange(Metapb.Graph graph,
                                      Metapb.GraphState stateOld,
                                      Metapb.GraphState stateNew) {
            }

            @Override
            public void onStoreRaftChanged(Metapb.Store store) {
            }
        });
    }
    /** Stops all background patrol/balance jobs immediately (pending tasks are discarded). */
    public void shutDown() {
        executor.shutdownNow();
    }
    /** Whether this PD node is currently the raft leader; scheduled jobs act only on the leader. */
    private boolean isLeader() {
        return RaftEngine.getInstance().isLeader();
    }
    /**
     * Inspects all stores and flags those that are no longer online or have been
     * offline for too long.
     *
     * @return the stores whose state was changed by this pass, or {@code null} when
     *         this node is not the leader
     * @throws PDException on metadata access failure
     */
    public List<Metapb.Store> patrolStores() throws PDException {
        if (!isLeader()) {
            return null;
        }

        List<Metapb.Store> changedStores = new ArrayList<>();
        // Compare the full store list against the currently active (heartbeating) set.
        List<Metapb.Store> stores = storeService.getStores("");
        Map<Long, Metapb.Store> activeStores = storeService.getActiveStores("")
                                                           .stream().collect(
                        Collectors.toMap(Metapb.Store::getId, t -> t));
        for (Metapb.Store store : stores) {
            Metapb.Store changeStore = null;
            if ((store.getState() == Metapb.StoreState.Up
                 || store.getState() == Metapb.StoreState.Unknown)
                && !activeStores.containsKey(store.getId())) {
                // Marked Up/Unknown but not heartbeating: demote to Offline.
                changeStore = Metapb.Store.newBuilder(store)
                                          .setState(Metapb.StoreState.Offline)
                                          .build();
            } else if ((store.getState() == Metapb.StoreState.Exiting &&
                        !activeStores.containsKey(store.getId())) ||
                       (store.getState() == Metapb.StoreState.Offline &&
                        (System.currentTimeMillis() - store.getLastHeartbeat() >
                         pdConfig.getStore().getMaxDownTime() * 1000) &&
                        (System.currentTimeMillis() - clusterStartTime >
                         pdConfig.getStore().getMaxDownTime() * 1000))) {
                // Either an Exiting store that stopped heartbeating, or an Offline store
                // past max-down-time (also requiring the cluster itself to have been up
                // that long, so a fresh PD restart does not tombstone everything).
                // checkStoreCanOffline guards against losing needed replicas.
                if (storeService.checkStoreCanOffline(store)) {
                    changeStore = Metapb.Store.newBuilder(store)
                                              .setState(Metapb.StoreState.Tombstone).build();
                    this.logService.insertLog(LogService.NODE_CHANGE,
                                              LogService.TASK, changeStore);
                    log.info("patrolStores store {} Offline", changeStore.getId());
                }
            }
            if (changeStore != null) {
                storeService.updateStore(changeStore);
                changedStores.add(changeStore);
            }
        }
        return changedStores;
    }
    /**
     * Inspects all partitions: reallocates replicas for shard groups whose replica
     * count is wrong, and takes shards hosted on tombstoned stores offline.
     *
     * @return always {@code null} (also when this node is not the leader)
     * @throws PDException on metadata access failure
     */
    public List<Metapb.Partition> patrolPartitions() throws PDException {
        if (!isLeader()) {
            return null;
        }

        // If the number of replicas is inconsistent, reallocate replicas.
        // The BALANCE_SHARD_KEY flag (180s TTL) pauses shard balancing meanwhile.
        for (Metapb.ShardGroup group : storeService.getShardGroups()) {
            if (group.getShardsCount() != pdConfig.getPartition().getShardCount()) {
                storeService.reallocShards(group);
                kvService.put(BALANCE_SHARD_KEY, "DOING", 180 * 1000);
            }
        }
        // Take shards on tombstoned stores offline, visiting each partition at most once.
        Map<Long, Metapb.Store> tombStores = storeService.getTombStores().stream().collect(
                Collectors.toMap(Metapb.Store::getId, t -> t));

        var partIds = new HashSet<Integer>();
        for (var pair : tombStores.entrySet()) {
            for (var partition : partitionService.getPartitionByStore(pair.getValue())) {
                if (partIds.contains(partition.getId())) {
                    continue;
                }
                partIds.add(partition.getId());

                storeService.storeTurnoff(pair.getValue());
                partitionService.shardOffline(partition, pair.getValue().getId());
            }

        }
        return null;
    }
    /**
     * Balances the number of shards between active stores by computing, for each
     * over-loaded store, which partitions to move to under-loaded stores, then
     * kicking off the migrations.
     * Skipped entirely when: not leader, a store went offline less than
     * {@code TurnOffAndBalanceInterval} ago, no active stores, or a balance run is
     * already flagged in KV.
     *
     * @return partitionId -> (source storeId, target storeId) for the planned moves,
     *         or {@code null} when the run was skipped
     * @throws PDException on metadata access failure
     */
    public synchronized Map<Integer, KVPair<Long, Long>> balancePartitionShard() throws
                                                                                 PDException {
        log.info("balancePartitions starting, isleader:{}", isLeader());

        if (!isLeader()) {
            return null;
        }

        // Wait out the cool-down after a store was turned off.
        if (System.currentTimeMillis() - lastStoreTurnoffTime < TurnOffAndBalanceInterval) {
            return null;
        }

        int activeStores = storeService.getActiveStores().size();
        if (activeStores == 0) {
            log.warn("balancePartitionShard non active stores, skip to balancePartitionShard");
            return null;
        }

        // Another balance/realloc run is still in progress (flag has a 180s TTL).
        if (Objects.equals(kvService.get(BALANCE_SHARD_KEY), "DOING")) {
            return null;
        }

        // Ideal per-store shard count: totalShards / activeStores, remainder spread later.
        int totalShards = pdConfig.getConfigService().getPartitionCount() *
                          pdConfig.getPartition().getShardCount();
        int averageCount = totalShards / activeStores;
        int remainder = totalShards % activeStores;

        // Count the partitions on each store, StoreId -> (PartitionID -> ShardRole)
        Map<Long, Map<Integer, Metapb.ShardRole>> partitionMap = new HashMap<>();
        storeService.getActiveStores().forEach(store -> {
            partitionMap.put(store.getId(), new HashMap<>());
        });

        // If any shard is still a Learner or any partition is not in normal state,
        // a previous migration is still converging — bail out.
        AtomicReference<Boolean> isLeaner = new AtomicReference<>(false);
        partitionService.getPartitions().forEach(partition -> {
            try {
                storeService.getShardList(partition.getId()).forEach(shard -> {
                    Long storeId = shard.getStoreId();
                    if (shard.getRole() == Metapb.ShardRole.Learner
                        || partition.getState() != Metapb.PartitionState.PState_Normal) {
                        isLeaner.set(true);
                    }
                    if (partitionMap.containsKey(storeId)) {
                        partitionMap.get(storeId).put(partition.getId(), shard.getRole());
                    }
                });
            } catch (PDException e) {
                log.error("get partition {} shard list error:{}.", partition.getId(),
                          e.getMessage());
            }
        });

        if (isLeaner.get()) {
            log.warn("balancePartitionShard is doing, skip this balancePartitionShard task");
            return null;
        }

        // Sort stores by shard count, highest first.
        // NOTE(review): raw KVPair constructor here — should be new KVPair<>(...).
        List<KVPair<Long, Integer>> sortedList = new ArrayList<>();
        partitionMap.forEach((storeId, shards) -> {
            sortedList.add(new KVPair(storeId, shards.size()));
        });
        sortedList.sort(((o1, o2) -> o2.getValue().compareTo(o1.getValue())));
        // Max-heap of under-loaded stores: storeId -> number of shards it can accept.
        PriorityQueue<KVPair<Long, Integer>> maxHeap = new PriorityQueue<>(sortedList.size(),
                                                                           (o1, o2) -> o2.getValue()
                                                                                         .compareTo(
                                                                                                 o1.getValue()));
        // Per-replica committedIndex stats.
        // NOTE(review): committedIndexMap is computed but never used in this method
        // (getMaxIndexGap is also uncalled here) — confirm whether this fetch can go.
        Map<Integer, Map<Long, Long>> committedIndexMap = partitionService.getCommittedIndexStats();
        // Partition ID --> (source StoreID, target StoreID)
        Map<Integer, KVPair<Long, Long>> movedPartitions = new HashMap<>();
        // Remove redundant shards, traverse the stores in the order of shards from most to
        // least, and the remainder is allocated to the store with more shards first, reducing
        // the probability of migration
        for (int index = 0; index < sortedList.size(); index++) {
            long storeId = sortedList.get(index).getKey();
            if (!partitionMap.containsKey(storeId)) {
                log.error("cannot found storeId {} in partitionMap", storeId);
                return null;
            }
            Map<Integer, Metapb.ShardRole> shards = partitionMap.get(storeId);
            int targetCount = index < remainder ? averageCount + 1 : averageCount;
            // Over-loaded store: pick (shards.size() - targetCount) partitions to move out,
            // skipping partitions already scheduled for a move.
            if (shards.size() > targetCount) {
                int movedCount = shards.size() - targetCount;
                log.info(
                        "balancePartitionShard storeId {}, shardsSize {}, targetCount {}, " +
                        "moveCount {}",
                        storeId, shards.size(), targetCount, movedCount);
                for (Iterator<Integer> iterator = shards.keySet().iterator();
                     movedCount > 0 && iterator.hasNext(); ) {
                    Integer id = iterator.next();
                    if (!movedPartitions.containsKey(id)) {
                        log.info("store {}, shard of partition {} can be moved", storeId, id);
                        // Target store (value) is filled in below; 0L = not yet assigned.
                        movedPartitions.put(id, new KVPair<>(storeId, 0L));
                        movedCount--;
                    }
                }
            } else if (shards.size() < targetCount) {
                // Under-loaded store: record how many shards it can still accept.
                int addCount = targetCount - shards.size();
                log.info(
                        "balancePartitionShard storeId {}, shardsSize {}, targetCount {}, " +
                        "addCount {}",
                        storeId, shards.size(), targetCount, addCount);
                maxHeap.add(new KVPair<>(storeId, addCount));
            }
        }

        if (movedPartitions.size() == 0) {
            log.warn(
                    "movedPartitions is empty, totalShards:{} averageCount:{} remainder:{} " +
                    "sortedList:{}",
                    totalShards, averageCount, remainder, sortedList);
        }
        // Match each pending move with a destination store from the heap, skipping
        // destinations that already host a replica of that partition.
        Iterator<Map.Entry<Integer, KVPair<Long, Long>>> moveIterator =
                movedPartitions.entrySet().iterator();

        while (moveIterator.hasNext()) {
            if (maxHeap.size() == 0) {
                break;
            }
            Map.Entry<Integer, KVPair<Long, Long>> moveEntry = moveIterator.next();
            int partitionId = moveEntry.getKey();
            long sourceStoreId = moveEntry.getValue().getKey();

            // Rejected candidates are parked here and pushed back after the search.
            List<KVPair<Long, Integer>> tmpList = new ArrayList<>(maxHeap.size());
            while (maxHeap.size() > 0) {
                KVPair<Long, Integer> pair = maxHeap.poll();
                long destStoreId = pair.getKey();
                boolean destContains = false;
                if (partitionMap.containsKey(destStoreId)) {
                    destContains = partitionMap.get(destStoreId).containsKey(partitionId);
                }
                // If the destination store does not already contain the partition, take it.
                if (!destContains) {
                    moveEntry.getValue().setValue(pair.getKey());
                    log.info(
                            "balancePartitionShard will move partition {} from store {} to store " +
                            "{}",
                            moveEntry.getKey(),
                            moveEntry.getValue().getKey(),
                            moveEntry.getValue().getValue());
                    if (pair.getValue() > 1) {
                        // Destination can still accept more shards; decrement and requeue.
                        pair.setValue(pair.getValue() - 1);
                        tmpList.add(pair);
                    }
                    break;
                }
                tmpList.add(pair);
            }
            maxHeap.addAll(tmpList);
        }

        // Flag the run (180s TTL) so overlapping balance/realloc passes back off.
        kvService.put(BALANCE_SHARD_KEY, "DOING", 180 * 1000);

        // Start the migration
        movedPartitions.forEach((partId, storePair) -> {
            // Neither the source nor destination storeID is 0
            if (storePair.getKey() > 0 && storePair.getValue() > 0) {
                partitionService.movePartitionsShard(partId, storePair.getKey(),
                                                     storePair.getValue());
            } else {
                log.warn("balancePartitionShard key or value is zero, partId:{} storePair:{}",
                         partId, storePair);
            }
        });
        return movedPartitions;
    }
    /**
     * Balances the number of partition leaders across stores: computes a per-store
     * leader quota from the shard distribution, then transfers leadership group by
     * group toward stores below their quota.
     *
     * @param immediately when {@code false}, the run is rate-limited to one per
     *                    {@code BalanceLeaderInterval}
     * @return shard-group id -> storeId chosen as leader for the groups changed
     * @throws PDException when a split/move/balance task is already in progress
     */
    public synchronized Map<Integer, Long> balancePartitionLeader(boolean immediately) throws
                                                                                       PDException {
        Map<Integer, Long> results = new HashMap<>();

        if (!isLeader()) {
            return results;
        }

        if (!immediately &&
            System.currentTimeMillis() - lastBalanceLeaderTime < BalanceLeaderInterval) {
            return results;
        }
        lastBalanceLeaderTime = System.currentTimeMillis();

        List<Metapb.ShardGroup> shardGroups = storeService.getShardGroups();

        // Bail out while a split or move task is running.
        var taskMeta = storeService.getTaskInfoMeta();
        if (taskMeta.hasSplitTaskDoing() || taskMeta.hasMoveTaskDoing()) {
            throw new PDException(1001, "split or combine task is processing, please try later!");
        }

        if (Objects.equals(kvService.get(BALANCE_SHARD_KEY), "DOING")) {
            throw new PDException(1001, "balance shard is processing, please try later!");
        }

        if (shardGroups.size() == 0) {
            return results;
        }

        // Total shard count hosted per store.
        Map<Long, Integer> storeShardCount = new HashMap<>();

        shardGroups.forEach(group -> {
            group.getShardsList().forEach(shard -> {
                storeShardCount.put(shard.getStoreId(),
                                    storeShardCount.getOrDefault(shard.getStoreId(), 0) + 1);
            });
        });

        log.info("balancePartitionLeader, shard group size: {}, by store: {}", shardGroups.size(),
                 storeShardCount);

        // Leader quota per store, highest-count stores polled first.
        PriorityQueue<KVPair<Long, Integer>> targetCount =
                new PriorityQueue<>(kvPairComparatorDesc);

        var sortedGroups = storeShardCount.entrySet().stream()
                                          .map(entry -> new KVPair<>(entry.getKey(),
                                                                     entry.getValue()))
                                          .sorted(kvPairComparatorAsc)
                                          .collect(Collectors.toList());
        int sum = 0;

        // Quota = shardCount / replication factor (at least 1); the store with the most
        // shards absorbs whatever remains so quotas sum to the group count.
        for (int i = 0; i < sortedGroups.size() - 1; i++) {
            // at least one
            int v = Math.max(
                    sortedGroups.get(i).getValue() / pdConfig.getPartition().getShardCount(), 1);
            targetCount.add(new KVPair<>(sortedGroups.get(i).getKey(), v));
            sum += v;
        }
        targetCount.add(new KVPair<>(sortedGroups.get(sortedGroups.size() - 1).getKey(),
                                     shardGroups.size() - sum));
        log.info("target count: {}", targetCount);

        for (var group : shardGroups) {
            var map = group.getShardsList().stream()
                           .collect(Collectors.toMap(Metapb.Shard::getStoreId, shard -> shard));
            var tmpList = new ArrayList<KVPair<Long, Integer>>();
            // Poll quota entries until one names a store that actually hosts a shard of
            // this group; non-matching entries are parked in tmpList and requeued after.
            while (!targetCount.isEmpty()) {
                var pair = targetCount.poll();
                var storeId = pair.getKey();
                if (map.containsKey(storeId)) {
                    if (map.get(storeId).getRole() != Metapb.ShardRole.Leader) {
                        log.info("shard group{}, store id:{}, set to leader", group.getId(),
                                 storeId);
                        partitionService.transferLeader(group.getId(), map.get(storeId));
                        results.put(group.getId(), storeId);
                    } else {
                        log.info("shard group {}, store id :{}, is leader, no need change",
                                 group.getId(), storeId);
                    }
                    if (pair.getValue() > 1) {
                        // This store's remaining quota drops by one; requeue it.
                        pair.setValue(pair.getValue() - 1);
                        tmpList.add(pair);
                    }
                    // A leader has been settled for this group.
                    break;
                } else {
                    tmpList.add(pair);
                }
            }
            targetCount.addAll(tmpList);
        }
        return results;
    }
private long getMaxIndexGap(Map<Integer, Map<Long, Long>> committedIndexMap, int partitionId) {
long maxGap = Long.MAX_VALUE;
if (committedIndexMap == null || !committedIndexMap.containsKey(partitionId)) {
return maxGap;
}
Map<Long, Long> shardMap = committedIndexMap.get(partitionId);
if (shardMap == null || shardMap.size() == 0) {
return maxGap;
}
List<Long> sortedList = new ArrayList<>();
shardMap.forEach((storeId, committedIndex) -> {
sortedList.add(committedIndex);
});
sortedList.sort(Comparator.reverseOrder());
maxGap = sortedList.get(0) - sortedList.get(sortedList.size() - 1);
return maxGap;
}
    /**
     * Performs partition splitting, either automatic or manual.
     *
     * @param mode   {@code Auto} delegates to {@link #autoSplitPartition()}; any other
     *               mode splits only the shard groups named in {@code params}
     * @param params per-partition split counts, used only in manual mode
     * @return currently always {@code null}; the split itself runs asynchronously
     * @throws PDException when splitting is not currently allowed
     */
    public List<Metapb.Partition> splitPartition(
            Pdpb.OperationMode mode, List<Pdpb.SplitDataParam> params) throws PDException {

        if (mode == Pdpb.OperationMode.Auto) {
            return autoSplitPartition();
        }

        // Manual mode: split exactly the requested shard groups by the requested counts.
        var list = params.stream()
                         .map(param -> new KVPair<>(param.getPartitionId(), param.getCount()))
                         .collect(Collectors.toList());

        storeService.splitShardGroups(list);
        return null;
    }
    /**
     * Automatically splits every shard group by the largest factor the cluster can
     * support, bounded so the per-store shard count stays within
     * partition.max-shards-per-store.
     *
     * @return currently always {@code null}; the split runs asynchronously after the
     *         cluster is put into the Offline (splitting) state
     * @throws PDException when the cluster state forbids splitting or the computed
     *         split factor is below 2
     */
    public List<Metapb.Partition> autoSplitPartition() throws PDException {
        if (!isLeader()) {
            return null;
        }

        // Only a healthy cluster may start a split; Cluster_Offline means one is running.
        if (Metapb.ClusterState.Cluster_OK != storeService.getClusterStats().getState()) {
            if (Metapb.ClusterState.Cluster_Offline == storeService.getClusterStats().getState()) {
                throw new PDException(Pdpb.ErrorType.Split_Partition_Doing_VALUE,
                                      "The data is splitting");
            } else {
                throw new PDException(Pdpb.ErrorType.Cluster_State_Forbid_Splitting_VALUE,
                                      "The current state of the cluster prohibits splitting data");
            }
        }

        // The maximum split factor the cluster can support:
        // (maxShardsPerStore * activeStores) / (shardGroups * replicationFactor)
        int splitCount = pdConfig.getPartition().getMaxShardsPerStore() *
                         storeService.getActiveStores().size() /
                         (storeService.getShardGroups().size() *
                          pdConfig.getPartition().getShardCount());

        if (splitCount < 2) {
            throw new PDException(Pdpb.ErrorType.Too_Many_Partitions_Per_Store_VALUE,
                                  "Too many partitions per store, partition.store-max-shard-count" +
                                  " = "
                                  + pdConfig.getPartition().getMaxShardsPerStore());
        }

        // If the maximum number of partitions per store is not reached, it will be split
        log.info("Start to split partitions..., split count = {}", splitCount);

        // Set the cluster status to Offline
        storeService.updateClusterStatus(Metapb.ClusterState.Cluster_Offline);
        // Modify the default number of partitions
        // pdConfig.getConfigService().setPartitionCount(storeService.getShardGroups().size() *
        // splitCount);

        var list = storeService.getShardGroups().stream()
                               .map(shardGroup -> new KVPair<>(shardGroup.getId(), splitCount))
                               .collect(Collectors.toList());
        storeService.splitShardGroups(list);

        return null;
    }
/**
* Store reports the status of the task
* The state of the partition changes, and the state of the ShardGroup, graph, and the entire
* cluster where the partition resides
*
* @param task
*/
public void reportTask(MetaTask.Task task) {
try {
switch (task.getType()) {
case Split_Partition:
partitionService.handleSplitTask(task);
break;
case Move_Partition:
partitionService.handleMoveTask(task);
break;
case Clean_Partition:
partitionService.handleCleanPartitionTask(task);
break;
default:
break;
}
} catch (Exception e) {
log.error("Report task exception {}, {}", e, task);
}
}
/**
* Compaction on rocksdb
*
* @throws PDException
*/
public Boolean dbCompaction(String tableName) throws PDException {
if (!isLeader()) {
return false;
}
for (Metapb.ShardGroup shardGroup : storeService.getShardGroups()) {
storeService.shardGroupsDbCompaction(shardGroup.getId(), tableName);
}
//
return true;
}
    /**
     * Determines whether every partition hosted on {@code sourceStore} can be migrated
     * to the remaining active stores, and produces the migration plan.
     * Result map keys: {@code "flag"} (boolean — plan is complete),
     * {@code "movedPartitions"} (partitionId -> (source storeId, target storeId), or
     * {@code null} when the plan is incomplete), and
     * {@code "current_store_is_online"} (present when the source store is active).
     *
     * @param sourceStore the store to be drained
     * @return the result map described above, or {@code null} when not leader
     * @throws PDException on metadata access failure
     */
    public Map<String, Object> canAllPartitionsMovedOut(Metapb.Store sourceStore) throws
                                                                                  PDException {
        if (!isLeader()) {
            return null;
        }
        // Analyze whether the partition on a store can be completely checked out
        Map<String, Object> resultMap = new HashMap<>();
        // Partitions hosted on the source store: StoreId -> (PartitionID -> ShardRole)
        Map<Long, Map<Integer, Metapb.ShardRole>> sourcePartitionMap = new HashMap<>();
        sourcePartitionMap.put(sourceStore.getId(), new HashMap<>());
        // Partitions hosted on the other active stores: StoreId -> (PartitionID -> ShardRole)
        Map<Long, Map<Integer, Metapb.ShardRole>> otherPartitionMap = new HashMap<>();
        // The amount of disk space remaining for each store
        Map<Long, Long> availableDiskSpace = new HashMap<>();
        // Record the amount of data in the partition to be migrated
        Map<Integer, Long> partitionDataSize = new HashMap<>();

        storeService.getActiveStores().forEach(store -> {
            if (store.getId() != sourceStore.getId()) {
                otherPartitionMap.put(store.getId(), new HashMap<>());
                // Records the remaining disk space of other stores, in bytes
                availableDiskSpace.put(store.getId(), store.getStats().getAvailable());
            } else {
                resultMap.put("current_store_is_online", true);
            }
        });

        // Sum the size of each partition to be migrated (graph stats report per graph,
        // in KB — see unitRate below).
        for (Metapb.GraphStats graphStats : sourceStore.getStats().getGraphStatsList()) {
            partitionDataSize.put(graphStats.getPartitionId(),
                                  partitionDataSize.getOrDefault(graphStats.getPartitionId(), 0L)
                                  + graphStats.getApproximateSize());
        }

        // Assign values to sourcePartitionMap and otherPartitionMap
        partitionService.getPartitions().forEach(partition -> {
            try {
                storeService.getShardList(partition.getId()).forEach(shard -> {
                    long storeId = shard.getStoreId();
                    if (storeId == sourceStore.getId()) {
                        sourcePartitionMap.get(storeId).put(partition.getId(), shard.getRole());
                    } else {
                        if (otherPartitionMap.containsKey(storeId)) {
                            otherPartitionMap.get(storeId).put(partition.getId(), shard.getRole());
                        }
                    }

                });
            } catch (PDException e) {
                throw new RuntimeException(e);
            }
        });

        // Every partition on the source store needs a new home; target (value) starts as
        // 0L = unassigned.
        Map<Integer, KVPair<Long, Long>> movedPartitions = new HashMap<>();
        for (Map.Entry<Integer, Metapb.ShardRole> entry : sourcePartitionMap.get(
                sourceStore.getId()).entrySet()) {
            movedPartitions.put(entry.getKey(), new KVPair<>(sourceStore.getId(), 0L));
        }
        // Min-heap of (storeId, partitionCount) so stores hosting fewer partitions are
        // always tried first as migration targets.
        PriorityQueue<KVPair<Long, Integer>> minHeap = new PriorityQueue<>(otherPartitionMap.size(),
                                                                           (o1, o2) -> o1.getValue()
                                                                                         .compareTo(
                                                                                                 o2.getValue()));
        otherPartitionMap.forEach((storeId, shards) -> {
            minHeap.add(new KVPair(storeId, shards.size()));
        });
        // For each partition, pick the least-loaded store that (a) does not already host
        // a replica of it and (b) has enough free disk space.
        Iterator<Map.Entry<Integer, KVPair<Long, Long>>> moveIterator =
                movedPartitions.entrySet().iterator();
        while (moveIterator.hasNext()) {
            Map.Entry<Integer, KVPair<Long, Long>> moveEntry = moveIterator.next();
            int partitionId = moveEntry.getKey();
            // Candidates rejected for this partition, requeued afterwards.
            List<KVPair<Long, Integer>> tmpList = new ArrayList<>();
            while (minHeap.size() > 0) {
                KVPair<Long, Integer> pair = minHeap.poll(); // The first element pops up
                long storeId = pair.getKey();
                int partitionCount = pair.getValue();
                Map<Integer, Metapb.ShardRole> shards = otherPartitionMap.get(storeId);
                final int unitRate = 1024; // disk space is in bytes, partition sizes in KB
                if ((!shards.containsKey(partitionId)) && (
                        availableDiskSpace.getOrDefault(storeId, 0L) / unitRate >=
                        partitionDataSize.getOrDefault(partitionId, 0L))) {
                    // If the partition is not included on the destination store and the
                    // remaining space of the destination store can accommodate the partition,
                    // the migration is performed
                    moveEntry.getValue().setValue(storeId); // Set the target store for the move
                    log.info("plan to move partition {} to store {}, " +
                             "available disk space {}, current partitionSize:{}",
                             partitionId,
                             storeId,
                             availableDiskSpace.getOrDefault(storeId, 0L) / unitRate,
                             partitionDataSize.getOrDefault(partitionId, 0L)
                    );
                    // Update the expected remaining space for the store
                    availableDiskSpace.put(storeId, availableDiskSpace.getOrDefault(storeId, 0L)
                                                    - partitionDataSize.getOrDefault(partitionId,
                                                                                     0L) *
                                                      unitRate);
                    // Update the number of partitions for that store in the stat variable
                    partitionCount += 1;
                    pair.setValue(partitionCount);
                    tmpList.add(pair);
                    break;
                } else {
                    tmpList.add(pair);
                }
            }
            minHeap.addAll(tmpList);
        }

        // Any partition still mapped to target 0L could not be placed.
        List<Integer> remainPartitions = new ArrayList<>();
        movedPartitions.forEach((partId, storePair) -> {
            if (storePair.getValue() == 0L) {
                remainPartitions.add(partId);
            }
        });
        if (remainPartitions.size() > 0) {
            resultMap.put("flag", false);
            resultMap.put("movedPartitions", null);
        } else {
            resultMap.put("flag", true);
            resultMap.put("movedPartitions", movedPartitions);
        }
        return resultMap;
    }
public Map<Integer, KVPair<Long, Long>> movePartitions(
Map<Integer, KVPair<Long, Long>> movedPartitions) {
if (!isLeader()) {
return null;
}
// Start the migration
log.info("begin move partitions:");
movedPartitions.forEach((partId, storePair) -> {
// Neither the source nor destination storeID is 0
if (storePair.getKey() > 0 && storePair.getValue() > 0) {
partitionService.movePartitionsShard(partId, storePair.getKey(),
storePair.getValue());
}
});
return movedPartitions;
}
}
|
apache/myfaces | 37,851 | impl/src/main/java/org/apache/myfaces/config/impl/FacesConfigDispenserImpl.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.myfaces.config.impl;
import jakarta.faces.render.RenderKitFactory;
import org.apache.myfaces.config.FacesConfigDispenser;
import org.apache.myfaces.config.element.Application;
import org.apache.myfaces.config.element.Behavior;
import org.apache.myfaces.config.element.ClientBehaviorRenderer;
import org.apache.myfaces.config.element.Component;
import org.apache.myfaces.config.element.ComponentTagDeclaration;
import org.apache.myfaces.config.element.ContractMapping;
import org.apache.myfaces.config.element.Converter;
import org.apache.myfaces.config.element.FaceletsProcessing;
import org.apache.myfaces.config.element.FaceletsTemplateMapping;
import org.apache.myfaces.config.element.FacesConfig;
import org.apache.myfaces.config.element.FacesConfigExtension;
import org.apache.myfaces.config.element.FacesFlowDefinition;
import org.apache.myfaces.config.element.Factory;
import org.apache.myfaces.config.element.LocaleConfig;
import org.apache.myfaces.config.element.NamedEvent;
import org.apache.myfaces.config.element.NavigationRule;
import org.apache.myfaces.config.element.RenderKit;
import org.apache.myfaces.config.element.Renderer;
import org.apache.myfaces.config.element.ResourceBundle;
import org.apache.myfaces.config.element.SystemEventListener;
import org.apache.myfaces.config.element.ViewPoolMapping;
import org.apache.myfaces.config.element.facelets.FaceletTagLibrary;
import org.apache.myfaces.config.impl.element.RenderKitImpl;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.logging.Logger;
/**
* @author <a href="mailto:oliver@rossmueller.com">Oliver Rossmueller</a>
*/
/**
 * Default {@link FacesConfigDispenser} implementation.
 *
 * <p>Aggregates the contents of every unmarshalled faces-config resource passed to
 * {@link #feed(FacesConfig)} (and the single-value {@code feedXyzFactory} methods) and
 * exposes the combined configuration through unmodifiable views.</p>
 *
 * <p>All collection-returning accessors return <em>live</em> unmodifiable views over the
 * internal lists/maps: entries fed after a view was obtained are visible through it, but
 * callers can never mutate the dispenser's state. Not thread-safe; configuration is
 * expected to be fed and read during single-threaded startup.</p>
 *
 * @author <a href="mailto:oliver@rossmueller.com">Oliver Rossmueller</a>
 */
public class FacesConfigDispenserImpl extends FacesConfigDispenser
{
    private static final long serialVersionUID = 3550379003287939559L;

    private static final Logger log = Logger.getLogger(FacesConfigDispenserImpl.class.getName());

    // Factory class names collected from <factory> elements, one list per factory type,
    // in the order they were declared.
    private final List<String> applicationFactories = new ArrayList<>();
    private final List<String> exceptionHandlerFactories = new ArrayList<>();
    private final List<String> externalContextFactories = new ArrayList<>();
    private final List<String> facesContextFactories = new ArrayList<>();
    private final List<String> lifecycleFactories = new ArrayList<>();
    private final List<String> viewDeclarationLanguageFactories = new ArrayList<>();
    private final List<String> partialViewContextFactories = new ArrayList<>();
    private final List<String> renderKitFactories = new ArrayList<>();
    private final List<String> tagHandlerDelegateFactories = new ArrayList<>();
    private final List<String> visitContextFactories = new ArrayList<>();
    private final List<String> faceletCacheFactories = new ArrayList<>();
    private final List<String> flashFactories = new ArrayList<>();
    private final List<String> clientWindowFactories = new ArrayList<>();
    private final List<String> flowHandlerFactories = new ArrayList<>();
    private final List<String> searchExpressionContextFactories = new ArrayList<>();
    private final List<String> facesServletFactories = new ArrayList<>();

    // Single-valued <application> settings: the last declared value wins.
    private String defaultRenderKitId;
    private String messageBundle;
    private String facesVersion;
    private LocaleConfig localeConfig;

    // Components keyed by component-type.
    private final Map<String, Component> components = new HashMap<>();
    // Converter class names keyed by for-class and by converter-id respectively.
    private final Map<String, String> converterByClass = new HashMap<>();
    private final Map<String, String> converterById = new HashMap<>();
    // Validator class names keyed by validator-id.
    private final Map<String, String> validators = new HashMap<>();
    private final List<Behavior> behaviors = new ArrayList<>();
    // Full converter configuration keyed by converter class name.
    private final Map<String, Converter> converterConfigurationByClassName = new HashMap<>();
    // LinkedHashMap so render kits keep the order in which they were first declared.
    private final Map<String, RenderKit> renderKits = new LinkedHashMap<>();

    private final List<String> actionListeners = new ArrayList<>();
    private final List<String> elResolvers = new ArrayList<>();
    private final List<String> lifecyclePhaseListeners = new ArrayList<>();
    private final List<String> navigationHandlers = new ArrayList<>();
    private final List<String> propertyResolver = new ArrayList<>();
    private final List<String> resourceHandlers = new ArrayList<>();
    private final List<String> stateManagers = new ArrayList<>();
    private final List<String> variableResolver = new ArrayList<>();
    private final List<String> viewHandlers = new ArrayList<>();
    // Default validators declared in config files vs. those registered via annotations;
    // kept separate because only the former are affected by an empty <default-validators/>
    // element (see feedApplication and JSF 2.0 spec section 3.5.3).
    private final List<String> defaultValidatorIds = new ArrayList<>();
    private final List<String> defaultAnnotatedValidatorIds = new ArrayList<>();
    private final List<String> searchExpressionHandlers = new ArrayList<>();
    private final List<String> searchKeywordResolvers = new ArrayList<>();

    private final List<NavigationRule> navigationRules = new ArrayList<>();
    private final List<ResourceBundle> resourceBundles = new ArrayList<>();
    private final List<SystemEventListener> systemEventListeners = new ArrayList<>();
    private final List<NamedEvent> namedEvents = new ArrayList<>();
    // Facelets processing configuration keyed by file extension; later entries override.
    private final Map<String, FaceletsProcessing> faceletsProcessingByFileExtension = new HashMap<>();
    private final List<FacesFlowDefinition> facesFlowDefinitions = new ArrayList<>();
    private final List<String> protectedViewUrlPatterns = new ArrayList<>();
    private final List<ContractMapping> resourceLibraryContractMappings = new ArrayList<>();
    private final List<ComponentTagDeclaration> componentTagDeclarations = new ArrayList<>();
    private final List<FaceletTagLibrary> faceletTagLibraries = new ArrayList<>();
    private final List<String> resourceResolvers = new ArrayList<>();
    private final List<ViewPoolMapping> viewPoolMappings = new ArrayList<>();
    private final List<FaceletsTemplateMapping> faceletsTemplateMappings = new ArrayList<>();

    /**
     * Add another unmarshalled faces config object.
     *
     * @param config unmarshalled faces config object
     */
    @Override
    public void feed(FacesConfig config)
    {
        for (Factory factory : config.getFactories())
        {
            applicationFactories.addAll(factory.getApplicationFactory());
            exceptionHandlerFactories.addAll(factory.getExceptionHandlerFactory());
            externalContextFactories.addAll(factory.getExternalContextFactory());
            facesContextFactories.addAll(factory.getFacesContextFactory());
            lifecycleFactories.addAll(factory.getLifecycleFactory());
            viewDeclarationLanguageFactories.addAll(factory.getViewDeclarationLanguageFactory());
            partialViewContextFactories.addAll(factory.getPartialViewContextFactory());
            renderKitFactories.addAll(factory.getRenderkitFactory());
            tagHandlerDelegateFactories.addAll(factory.getTagHandlerDelegateFactory());
            visitContextFactories.addAll(factory.getVisitContextFactory());
            faceletCacheFactories.addAll(factory.getFaceletCacheFactory());
            flashFactories.addAll(factory.getFlashFactory());
            clientWindowFactories.addAll(factory.getClientWindowFactory());
            flowHandlerFactories.addAll(factory.getFlowHandlerFactory());
            searchExpressionContextFactories.addAll(factory.getSearchExpressionContextFactory());
            facesServletFactories.addAll(factory.getFacesServletFactory());
        }

        for (Component component : config.getComponents())
        {
            components.put(component.getComponentType(), component);
        }
        validators.putAll(config.getValidators());
        behaviors.addAll(config.getBehaviors());

        for (Application application : config.getApplications())
        {
            feedApplication(application);
        }

        for (Converter converter : config.getConverters())
        {
            feedConverter(converter);
        }

        for (RenderKit renderKit : config.getRenderKits())
        {
            // A render kit declared without an explicit id contributes to the
            // standard HTML basic render kit.
            String renderKitId = renderKit.getId() != null
                    ? renderKit.getId()
                    : RenderKitFactory.HTML_BASIC_RENDER_KIT;
            // Render kits with the same id declared in different config resources
            // are merged into a single RenderKit instance.
            renderKits.computeIfAbsent(renderKitId, id -> new RenderKitImpl()).merge(renderKit);
        }

        // One pass over the faces-config extensions collects everything they contribute.
        for (FacesConfigExtension extension : config.getFacesConfigExtensions())
        {
            for (FaceletsProcessing faceletsProcessing : extension.getFaceletsProcessingList())
            {
                String fileExtension = faceletsProcessing.getFileExtension();
                if (fileExtension != null && fileExtension.length() > 0)
                {
                    faceletsProcessingByFileExtension.put(fileExtension, faceletsProcessing);
                }
            }
            viewPoolMappings.addAll(extension.getViewPoolMappings());
            faceletsTemplateMappings.addAll(extension.getFaceletsTemplateMappings());
        }

        componentTagDeclarations.addAll(config.getComponentTagDeclarations());
        faceletTagLibraries.addAll(config.getFaceletTagLibraryList());
        lifecyclePhaseListeners.addAll(config.getLifecyclePhaseListener());
        navigationRules.addAll(config.getNavigationRules());
        facesVersion = config.getVersion();
        namedEvents.addAll(config.getNamedEvents());
        facesFlowDefinitions.addAll(config.getFacesFlowDefinitions());
        protectedViewUrlPatterns.addAll(config.getProtectedViewsUrlPatternList());
        resourceResolvers.addAll(config.getResourceResolversList());
    }

    /**
     * Merges the settings of one {@code <application>} element into the aggregated state.
     */
    private void feedApplication(Application application)
    {
        // For single-valued settings the last declared value wins.
        if (!application.getDefaultRenderkitId().isEmpty())
        {
            defaultRenderKitId = lastOf(application.getDefaultRenderkitId());
        }
        if (!application.getMessageBundle().isEmpty())
        {
            messageBundle = lastOf(application.getMessageBundle());
        }
        if (!application.getLocaleConfig().isEmpty())
        {
            localeConfig = lastOf(application.getLocaleConfig());
        }

        actionListeners.addAll(application.getActionListener());
        navigationHandlers.addAll(application.getNavigationHandler());
        resourceHandlers.addAll(application.getResourceHandler());
        viewHandlers.addAll(application.getViewHandler());
        stateManagers.addAll(application.getStateManager());
        propertyResolver.addAll(application.getPropertyResolver());
        variableResolver.addAll(application.getVariableResolver());
        resourceBundles.addAll(application.getResourceBundle());
        elResolvers.addAll(application.getElResolver());
        resourceLibraryContractMappings.addAll(application.getResourceLibraryContractMappings());
        searchExpressionHandlers.addAll(application.getSearchExpressionHandler());
        searchKeywordResolvers.addAll(application.getSearchKeywordResolver());

        // JSF 2.0 spec section 3.5.3: any configuration resource that declares a list of
        // default validators overrides any list provided in a previously processed
        // configuration resource. If an empty <default-validators/> element is found,
        // the list of default validators must be cleared.
        if (application.isDefaultValidatorsPresent())
        {
            // A <default-validators> element is present, so previously collected
            // config-file default validators are discarded before adding the new
            // entries (which may be zero).
            defaultValidatorIds.clear();
            defaultValidatorIds.addAll(application.getDefaultValidatorIds());
        }
        else
        {
            // No <default-validators> element: any default validator ids present here
            // were registered via annotations and are NOT affected by an empty element
            // according to section 3.5.3, so they are kept in a separate list.
            defaultAnnotatedValidatorIds.addAll(application.getDefaultValidatorIds());
        }

        systemEventListeners.addAll(application.getSystemEventListeners());
    }

    /**
     * Registers one converter declaration in the id-, class- and configuration maps.
     */
    private void feedConverter(Converter converter)
    {
        if (converter.getConverterId() != null)
        {
            converterById.put(converter.getConverterId(), converter.getConverterClass());
        }
        if (converter.getForClass() != null)
        {
            String oldConverter = converterByClass.get(converter.getForClass());
            // don't log if someone overwrites the built-in converters
            if (oldConverter != null && !oldConverter.startsWith("jakarta.faces.convert."))
            {
                log.warning("There is already a converter defined for class: " + converter.getForClass() + "."
                        + " old: " + oldConverter
                        + " new: " + converter.getConverterClass());
            }
            converterByClass.put(converter.getForClass(), converter.getConverterClass());
        }
        converterConfigurationByClassName.put(converter.getConverterClass(), converter);
    }

    /**
     * Returns the last element of a non-empty list ("last declared wins" semantics).
     */
    private static <T> T lastOf(List<T> list)
    {
        return list.get(list.size() - 1);
    }

    /**
     * Add another ApplicationFactory class name
     *
     * @param factoryClassName a class name
     */
    @Override
    public void feedApplicationFactory(String factoryClassName)
    {
        applicationFactories.add(factoryClassName);
    }

    @Override
    public void feedExceptionHandlerFactory(String factoryClassName)
    {
        exceptionHandlerFactories.add(factoryClassName);
    }

    @Override
    public void feedExternalContextFactory(String factoryClassName)
    {
        externalContextFactories.add(factoryClassName);
    }

    /**
     * Add another FacesContextFactory class name
     *
     * @param factoryClassName a class name
     */
    @Override
    public void feedFacesContextFactory(String factoryClassName)
    {
        facesContextFactories.add(factoryClassName);
    }

    /**
     * Add another LifecycleFactory class name
     *
     * @param factoryClassName a class name
     */
    @Override
    public void feedLifecycleFactory(String factoryClassName)
    {
        lifecycleFactories.add(factoryClassName);
    }

    @Override
    public void feedViewDeclarationLanguageFactory(String factoryClassName)
    {
        viewDeclarationLanguageFactories.add(factoryClassName);
    }

    @Override
    public void feedPartialViewContextFactory(String factoryClassName)
    {
        partialViewContextFactories.add(factoryClassName);
    }

    /**
     * Add another RenderKitFactory class name
     *
     * @param factoryClassName a class name
     */
    @Override
    public void feedRenderKitFactory(String factoryClassName)
    {
        renderKitFactories.add(factoryClassName);
    }

    @Override
    public void feedTagHandlerDelegateFactory(String factoryClassName)
    {
        tagHandlerDelegateFactories.add(factoryClassName);
    }

    @Override
    public void feedVisitContextFactory(String factoryClassName)
    {
        visitContextFactories.add(factoryClassName);
    }

    @Override
    public void feedFaceletCacheFactory(String factoryClassName)
    {
        faceletCacheFactories.add(factoryClassName);
    }

    @Override
    public void feedFlashFactory(String factoryClassName)
    {
        flashFactories.add(factoryClassName);
    }

    @Override
    public void feedClientWindowFactory(String factoryClassName)
    {
        clientWindowFactories.add(factoryClassName);
    }

    @Override
    public void feedSearchExpressionContextFactory(String factoryClassName)
    {
        searchExpressionContextFactories.add(factoryClassName);
    }

    @Override
    public void feedFacesServletFactory(String factoryClassName)
    {
        facesServletFactories.add(factoryClassName);
    }

    // --- Accessors. Each returns a live, unmodifiable view over the internal state. ---

    /**
     * @return Collection over ApplicationFactory class names
     */
    @Override
    public Collection<String> getApplicationFactoryIterator()
    {
        return Collections.unmodifiableList(applicationFactories);
    }

    @Override
    public Collection<String> getExceptionHandlerFactoryIterator()
    {
        return Collections.unmodifiableList(exceptionHandlerFactories);
    }

    @Override
    public Collection<String> getExternalContextFactoryIterator()
    {
        return Collections.unmodifiableList(externalContextFactories);
    }

    /**
     * @return Collection over FacesContextFactory class names
     */
    @Override
    public Collection<String> getFacesContextFactoryIterator()
    {
        return Collections.unmodifiableList(facesContextFactories);
    }

    /**
     * @return Collection over LifecycleFactory class names
     */
    @Override
    public Collection<String> getLifecycleFactoryIterator()
    {
        return Collections.unmodifiableList(lifecycleFactories);
    }

    @Override
    public Collection<String> getViewDeclarationLanguageFactoryIterator()
    {
        return Collections.unmodifiableList(viewDeclarationLanguageFactories);
    }

    @Override
    public Collection<String> getPartialViewContextFactoryIterator()
    {
        return Collections.unmodifiableList(partialViewContextFactories);
    }

    /**
     * @return Collection over RenderKit factory class names
     */
    @Override
    public Collection<String> getRenderKitFactoryIterator()
    {
        return Collections.unmodifiableList(renderKitFactories);
    }

    @Override
    public Collection<String> getTagHandlerDelegateFactoryIterator()
    {
        return Collections.unmodifiableList(tagHandlerDelegateFactories);
    }

    @Override
    public Collection<String> getVisitContextFactoryIterator()
    {
        return Collections.unmodifiableList(visitContextFactories);
    }

    @Override
    public Collection<String> getFaceletCacheFactoryIterator()
    {
        return Collections.unmodifiableList(faceletCacheFactories);
    }

    @Override
    public Collection<String> getFlashFactoryIterator()
    {
        return Collections.unmodifiableList(flashFactories);
    }

    @Override
    public Collection<String> getFlowHandlerFactoryIterator()
    {
        return Collections.unmodifiableList(flowHandlerFactories);
    }

    @Override
    public Collection<String> getClientWindowFactoryIterator()
    {
        return Collections.unmodifiableList(clientWindowFactories);
    }

    @Override
    public Collection<String> getSearchExpressionContextFactoryIterator()
    {
        return Collections.unmodifiableList(searchExpressionContextFactories);
    }

    @Override
    public Collection<String> getFacesServletFactoryIterator()
    {
        return Collections.unmodifiableList(facesServletFactories);
    }

    /**
     * @return Collection over ActionListener class names
     */
    @Override
    public Collection<String> getActionListenerIterator()
    {
        return Collections.unmodifiableList(actionListeners);
    }

    /**
     * @return the default render kit id
     */
    @Override
    public String getDefaultRenderKitId()
    {
        return defaultRenderKitId;
    }

    /**
     * @return the last declared message bundle name, or {@code null} if none was declared
     */
    @Override
    public String getMessageBundle()
    {
        return messageBundle;
    }

    /**
     * @return Collection over NavigationHandler class names
     */
    @Override
    public Collection<String> getNavigationHandlerIterator()
    {
        return Collections.unmodifiableList(navigationHandlers);
    }

    /**
     * @return Collection over ResourceHandler class names
     */
    @Override
    public Collection<String> getResourceHandlerIterator()
    {
        return Collections.unmodifiableList(resourceHandlers);
    }

    /**
     * @return Collection over ViewHandler class names
     */
    @Override
    public Collection<String> getViewHandlerIterator()
    {
        return Collections.unmodifiableList(viewHandlers);
    }

    /**
     * @return Collection over StateManager class names
     */
    @Override
    public Collection<String> getStateManagerIterator()
    {
        return Collections.unmodifiableList(stateManagers);
    }

    /**
     * @return Collection over PropertyResolver class names
     */
    @Override
    public Collection<String> getPropertyResolverIterator()
    {
        return Collections.unmodifiableList(propertyResolver);
    }

    /**
     * @return Collection over VariableResolver class names
     */
    @Override
    public Collection<String> getVariableResolverIterator()
    {
        return Collections.unmodifiableList(variableResolver);
    }

    /**
     * @return the default locale name, or {@code null} if no locale-config was declared
     */
    @Override
    public String getDefaultLocale()
    {
        if (localeConfig != null)
        {
            return localeConfig.getDefaultLocale();
        }
        return null;
    }

    /**
     * @return Collection over supported locale names (empty if no locale-config was declared)
     */
    @Override
    public Collection<String> getSupportedLocalesIterator()
    {
        if (localeConfig == null)
        {
            return Collections.emptyList();
        }
        return Collections.unmodifiableCollection(localeConfig.getSupportedLocales());
    }

    /**
     * @return Collection over all defined component types
     */
    @Override
    public Collection<String> getComponentTypes()
    {
        return Collections.unmodifiableCollection(components.keySet());
    }

    @Override
    public Map<String, Component> getComponentsByType()
    {
        return Collections.unmodifiableMap(components);
    }

    /**
     * @return component class that belongs to the given component type,
     *         or {@code null} if the type is unknown
     */
    @Override
    public String getComponentClass(String componentType)
    {
        Component component = components.get(componentType);
        return component == null ? null : component.getComponentClass();
    }

    /**
     * @return Collection over all defined converter ids
     */
    @Override
    public Collection<String> getConverterIds()
    {
        return Collections.unmodifiableCollection(converterById.keySet());
    }

    @Override
    public Map<String, String> getConverterClassesById()
    {
        return Collections.unmodifiableMap(converterById);
    }

    /**
     * @return Collection over all classes with an associated converter
     */
    @Override
    public Collection<String> getConverterClasses()
    {
        return Collections.unmodifiableCollection(converterByClass.keySet());
    }

    @Override
    public Map<String, String> getConverterClassesByClass()
    {
        return Collections.unmodifiableMap(converterByClass);
    }

    /**
     * @return Collection over all converter class names that have a configuration
     */
    @Override
    public Collection<String> getConverterConfigurationByClassName()
    {
        return Collections.unmodifiableCollection(converterConfigurationByClassName.keySet());
    }

    @Override
    public Converter getConverterConfiguration(String converterClassName)
    {
        return converterConfigurationByClassName.get(converterClassName);
    }

    /**
     * @return converter class that belongs to the given converter id
     */
    @Override
    public String getConverterClassById(String converterId)
    {
        return converterById.get(converterId);
    }

    /**
     * @return converter class that is associated with the given class name
     */
    @Override
    public String getConverterClassByClass(String className)
    {
        return converterByClass.get(className);
    }

    /**
     * @return Collection over all defined default validator ids:
     *         annotation-registered ids first, then those from config files
     */
    @Override
    public Collection<String> getDefaultValidatorIds()
    {
        List<String> allDefaultValidatorIds = new ArrayList<>(defaultAnnotatedValidatorIds);
        allDefaultValidatorIds.addAll(defaultValidatorIds);
        return Collections.unmodifiableCollection(allDefaultValidatorIds);
    }

    /**
     * @return Collection over all defined validator ids
     */
    @Override
    public Collection<String> getValidatorIds()
    {
        return Collections.unmodifiableCollection(validators.keySet());
    }

    @Override
    public Map<String, String> getValidatorClassesById()
    {
        return Collections.unmodifiableMap(validators);
    }

    /**
     * @return validator class name that belongs to the given validator id
     */
    @Override
    public String getValidatorClass(String validatorId)
    {
        return validators.get(validatorId);
    }

    /**
     * @return Collection over {@link org.apache.myfaces.config.element.NavigationRule NavigationRule}s
     */
    @Override
    public Collection<NavigationRule> getNavigationRules()
    {
        return Collections.unmodifiableList(navigationRules);
    }

    /**
     * @return Collection over all defined renderkit ids
     */
    @Override
    public Collection<String> getRenderKitIds()
    {
        return Collections.unmodifiableCollection(renderKits.keySet());
    }

    /**
     * @return renderkit class names for the given renderkit id;
     *         the id must be one returned by {@link #getRenderKitIds()}
     */
    @Override
    public Collection<String> getRenderKitClasses(String renderKitId)
    {
        return renderKits.get(renderKitId).getRenderKitClasses();
    }

    /**
     * @return Iterator over
     *         {@link org.apache.myfaces.config.element.ClientBehaviorRenderer ClientBehaviorRenderer}s
     *         for the given renderKitId
     */
    @Override
    public Collection<ClientBehaviorRenderer> getClientBehaviorRenderers(String renderKitId)
    {
        return renderKits.get(renderKitId).getClientBehaviorRenderers();
    }

    /**
     * @return Collection over {@link org.apache.myfaces.config.element.Renderer Renderer}s
     *         for the given renderKitId
     */
    @Override
    public Collection<Renderer> getRenderers(String renderKitId)
    {
        return renderKits.get(renderKitId).getRenderer();
    }

    /**
     * @return Collection over {@link jakarta.faces.event.PhaseListener} implementation class names
     */
    @Override
    public Collection<String> getLifecyclePhaseListeners()
    {
        return Collections.unmodifiableList(lifecyclePhaseListeners);
    }

    @Override
    public Collection<ResourceBundle> getResourceBundles()
    {
        return Collections.unmodifiableList(resourceBundles);
    }

    @Override
    public Collection<String> getElResolvers()
    {
        return Collections.unmodifiableList(elResolvers);
    }

    @Override
    public Collection<SystemEventListener> getSystemEventListeners()
    {
        return Collections.unmodifiableList(systemEventListeners);
    }

    @Override
    public Collection<Behavior> getBehaviors()
    {
        return Collections.unmodifiableList(behaviors);
    }

    /**
     * @return the faces version of the last config resource fed
     */
    @Override
    public String getFacesVersion()
    {
        return facesVersion;
    }

    @Override
    public Collection<NamedEvent> getNamedEvents()
    {
        return Collections.unmodifiableList(namedEvents);
    }

    @Override
    public Collection<FaceletsProcessing> getFaceletsProcessing()
    {
        return Collections.unmodifiableCollection(faceletsProcessingByFileExtension.values());
    }

    @Override
    public FaceletsProcessing getFaceletsProcessingConfiguration(String fileExtension)
    {
        return faceletsProcessingByFileExtension.get(fileExtension);
    }

    @Override
    public Collection<FacesFlowDefinition> getFacesFlowDefinitions()
    {
        return Collections.unmodifiableList(facesFlowDefinitions);
    }

    @Override
    public Collection<String> getProtectedViewUrlPatterns()
    {
        return Collections.unmodifiableList(protectedViewUrlPatterns);
    }

    @Override
    public Collection<ContractMapping> getResourceLibraryContractMappings()
    {
        return Collections.unmodifiableList(resourceLibraryContractMappings);
    }

    @Override
    public Collection<ComponentTagDeclaration> getComponentTagDeclarations()
    {
        return Collections.unmodifiableList(componentTagDeclarations);
    }

    @Override
    public Collection<String> getResourceResolvers()
    {
        return Collections.unmodifiableList(resourceResolvers);
    }

    @Override
    public Collection<FaceletTagLibrary> getTagLibraries()
    {
        return Collections.unmodifiableList(faceletTagLibraries);
    }

    @Override
    public Collection<ViewPoolMapping> getViewPoolMappings()
    {
        return Collections.unmodifiableList(viewPoolMappings);
    }

    @Override
    public Collection<String> getSearchKeywordResolvers()
    {
        return Collections.unmodifiableList(searchKeywordResolvers);
    }

    @Override
    public Collection<String> getSearchExpressionHandlerIterator()
    {
        return Collections.unmodifiableList(searchExpressionHandlers);
    }

    @Override
    public Collection<FaceletsTemplateMapping> getFaceletsTemplateMappings()
    {
        return Collections.unmodifiableList(faceletsTemplateMappings);
    }
}
|
apache/uima-uimaj | 37,901 | uimaj-core/src/test/java/org/apache/uima/cas/impl/FeaturePathTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.uima.cas.impl;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import org.apache.uima.UIMAFramework;
import org.apache.uima.cas.ArrayFS;
import org.apache.uima.cas.BooleanArrayFS;
import org.apache.uima.cas.ByteArrayFS;
import org.apache.uima.cas.CAS;
import org.apache.uima.cas.CASException;
import org.apache.uima.cas.CASRuntimeException;
import org.apache.uima.cas.DoubleArrayFS;
import org.apache.uima.cas.Feature;
import org.apache.uima.cas.FeaturePath;
import org.apache.uima.cas.FloatArrayFS;
import org.apache.uima.cas.IntArrayFS;
import org.apache.uima.cas.LongArrayFS;
import org.apache.uima.cas.ShortArrayFS;
import org.apache.uima.cas.StringArrayFS;
import org.apache.uima.cas.Type;
import org.apache.uima.cas.TypeClass;
import org.apache.uima.cas.text.AnnotationFS;
import org.apache.uima.resource.metadata.TypeSystemDescription;
import org.apache.uima.test.junit_extension.JUnitExtension;
import org.apache.uima.util.CasCreationUtils;
import org.apache.uima.util.XMLInputSource;
import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
/**
 * Tests for the {@code FeaturePathImpl} implementation of {@code FeaturePath}.
 *
 * <p>Coverage: all primitive feature value types, reference (FS-valued) paths,
 * the built-in path functions ({@code fsId()}, {@code coveredText()},
 * {@code typeName()}), error conditions, the {@code addFeature()} API, and all
 * array-valued feature types.
 *
 * <p>Each test loads the type system from
 * {@code featurePathTests/FeaturePathTestTypeSystem.xml}, creates a fresh CAS,
 * and uses the CAS's document annotation as the feature structure under test.
 */
class FeaturePathTest {
/**
 * Tests all primitive feature path types.
 */
@Test
void testPrimitiveFeaturePathTypes() throws Exception {
// NOTE(review): the XMLInputSource opened here (and in the other tests below)
// is never closed; if it holds an open stream this leaks a file handle —
// consider try-with-resources (confirm XMLInputSource is Closeable in this
// UIMA version before changing).
XMLInputSource in = new XMLInputSource(
JUnitExtension.getFile("featurePathTests/FeaturePathTestTypeSystem.xml"));
TypeSystemDescription typeSystemDescription = UIMAFramework.getXMLParser()
.parseTypeSystemDescription(in);
CAS cas = CasCreationUtils.createCas(typeSystemDescription, null, null);
cas.setDocumentText("Sample Text");
// test string feature
Feature stringFeat = cas.getDocumentAnnotation().getType()
.getFeatureByBaseName("stringFeature");
cas.getDocumentAnnotation().setStringValue(stringFeat, "TestString");
String path = "/stringFeature";
FeaturePath featurePath = new FeaturePathImpl();
featurePath.initialize(path);
assertEquals(path, featurePath.getFeaturePath());
featurePath.typeInit(cas.getDocumentAnnotation().getType());
assertEquals("TestString", featurePath.getStringValue(cas.getDocumentAnnotation()));
assertEquals("TestString", featurePath.getValueAsString(cas.getDocumentAnnotation()));
assertEquals(TypeClass.TYPE_CLASS_STRING,
featurePath.getTypeClass(cas.getDocumentAnnotation()));
assertEquals(stringFeat.getRange(), featurePath.getType(cas.getDocumentAnnotation()));
assertTrue(featurePath.size() == 1);
// identity comparison is intentional: the path must expose the same Feature object
assertTrue(featurePath.getFeature(0) == stringFeat);
// test short feature
Feature shortFeat = cas.getDocumentAnnotation().getType().getFeatureByBaseName("shortFeature");
cas.getDocumentAnnotation().setShortValue(shortFeat, (short) 12);
path = "/shortFeature";
featurePath = new FeaturePathImpl();
featurePath.initialize(path);
assertEquals(path, featurePath.getFeaturePath());
featurePath.typeInit(cas.getDocumentAnnotation().getType());
assertEquals(Short.valueOf((short) 12), featurePath.getShortValue(cas.getDocumentAnnotation()));
assertEquals("12", featurePath.getValueAsString(cas.getDocumentAnnotation()));
assertEquals(TypeClass.TYPE_CLASS_SHORT, featurePath.getTypeClass(cas.getDocumentAnnotation()));
assertEquals(shortFeat.getRange(), featurePath.getType(cas.getDocumentAnnotation()));
// a null FS must yield null values rather than throwing
assertEquals(null, featurePath.getValueAsString(null));
assertEquals(null, featurePath.getStringValue(null));
// test float feature
Feature floatFeat = cas.getDocumentAnnotation().getType().getFeatureByBaseName("floatFeature");
cas.getDocumentAnnotation().setFloatValue(floatFeat, 1.12f);
path = "/floatFeature";
featurePath = new FeaturePathImpl();
featurePath.initialize(path);
assertEquals(path, featurePath.getFeaturePath());
featurePath.typeInit(cas.getDocumentAnnotation().getType());
Assertions.assertThat(featurePath.getFloatValue(cas.getDocumentAnnotation())).isEqualTo(1.12f);
assertEquals("1.12", featurePath.getValueAsString(cas.getDocumentAnnotation()));
assertEquals(TypeClass.TYPE_CLASS_FLOAT, featurePath.getTypeClass(cas.getDocumentAnnotation()));
assertEquals(floatFeat.getRange(), featurePath.getType(cas.getDocumentAnnotation()));
assertEquals(null, featurePath.getValueAsString(null));
assertEquals(null, featurePath.getFloatValue(null));
// test double feature
Feature doubleFeat = cas.getDocumentAnnotation().getType()
.getFeatureByBaseName("doubleFeature");
cas.getDocumentAnnotation().setDoubleValue(doubleFeat, 100.5);
path = "/doubleFeature";
featurePath = new FeaturePathImpl();
featurePath.initialize(path);
assertEquals(path, featurePath.getFeaturePath());
featurePath.typeInit(cas.getDocumentAnnotation().getType());
Assertions.assertThat(featurePath.getDoubleValue(cas.getDocumentAnnotation())).isEqualTo(100.5);
assertEquals("100.5", featurePath.getValueAsString(cas.getDocumentAnnotation()));
assertEquals(TypeClass.TYPE_CLASS_DOUBLE,
featurePath.getTypeClass(cas.getDocumentAnnotation()));
assertEquals(doubleFeat.getRange(), featurePath.getType(cas.getDocumentAnnotation()));
assertEquals(null, featurePath.getValueAsString(null));
assertEquals(null, featurePath.getDoubleValue(null));
// test long feature
Feature longFeat = cas.getDocumentAnnotation().getType().getFeatureByBaseName("longFeature");
cas.getDocumentAnnotation().setLongValue(longFeat, 2000);
path = "/longFeature";
featurePath = new FeaturePathImpl();
featurePath.initialize(path);
assertEquals(path, featurePath.getFeaturePath());
featurePath.typeInit(cas.getDocumentAnnotation().getType());
assertEquals(Long.valueOf(2000), featurePath.getLongValue(cas.getDocumentAnnotation()));
assertEquals("2000", featurePath.getValueAsString(cas.getDocumentAnnotation()));
assertEquals(TypeClass.TYPE_CLASS_LONG, featurePath.getTypeClass(cas.getDocumentAnnotation()));
assertEquals(longFeat.getRange(), featurePath.getType(cas.getDocumentAnnotation()));
assertEquals(null, featurePath.getValueAsString(null));
assertEquals(null, featurePath.getLongValue(null));
// test int feature
Feature intFeat = cas.getDocumentAnnotation().getType().getFeatureByBaseName("intFeature");
cas.getDocumentAnnotation().setIntValue(intFeat, 5);
path = "/intFeature";
featurePath = new FeaturePathImpl();
featurePath.initialize(path);
assertEquals(path, featurePath.getFeaturePath());
featurePath.typeInit(cas.getDocumentAnnotation().getType());
assertEquals(Integer.valueOf(5), featurePath.getIntValue(cas.getDocumentAnnotation()));
assertEquals("5", featurePath.getValueAsString(cas.getDocumentAnnotation()));
assertEquals(TypeClass.TYPE_CLASS_INT, featurePath.getTypeClass(cas.getDocumentAnnotation()));
assertEquals(intFeat.getRange(), featurePath.getType(cas.getDocumentAnnotation()));
assertEquals(null, featurePath.getValueAsString(null));
assertEquals(null, featurePath.getIntValue(null));
// test boolean feature
Feature boolFeat = cas.getDocumentAnnotation().getType().getFeatureByBaseName("booleanFeature");
cas.getDocumentAnnotation().setBooleanValue(boolFeat, true);
path = "/booleanFeature";
featurePath = new FeaturePathImpl();
featurePath.initialize(path);
assertEquals(path, featurePath.getFeaturePath());
featurePath.typeInit(cas.getDocumentAnnotation().getType());
assertEquals(Boolean.valueOf(true), featurePath.getBooleanValue(cas.getDocumentAnnotation()));
assertEquals("true", featurePath.getValueAsString(cas.getDocumentAnnotation()));
assertEquals(TypeClass.TYPE_CLASS_BOOLEAN,
featurePath.getTypeClass(cas.getDocumentAnnotation()));
assertEquals(boolFeat.getRange(), featurePath.getType(cas.getDocumentAnnotation()));
assertEquals(null, featurePath.getValueAsString(null));
assertEquals(null, featurePath.getBooleanValue(null));
// test byte feature
Feature byteFeat = cas.getDocumentAnnotation().getType().getFeatureByBaseName("byteFeature");
cas.getDocumentAnnotation().setByteValue(byteFeat, (byte) 127);
path = "/byteFeature";
featurePath = new FeaturePathImpl();
featurePath.initialize(path);
assertEquals(path, featurePath.getFeaturePath());
featurePath.typeInit(cas.getDocumentAnnotation().getType());
assertEquals(Byte.valueOf((byte) 127), featurePath.getByteValue(cas.getDocumentAnnotation()));
assertEquals("127", featurePath.getValueAsString(cas.getDocumentAnnotation()));
assertEquals(TypeClass.TYPE_CLASS_BYTE, featurePath.getTypeClass(cas.getDocumentAnnotation()));
assertEquals(byteFeat.getRange(), featurePath.getType(cas.getDocumentAnnotation()));
assertEquals(null, featurePath.getValueAsString(null));
assertEquals(null, featurePath.getByteValue(null));
}
/**
 * Tests advanced feature paths: unset reference features, multi-hop reference
 * chains (both the "not always valid" slow-lookup and "always valid"
 * fast-lookup variants), the empty and "/" root paths, and evaluating a path
 * initialized on a supertype against a subtype instance.
 */
@Test
void testAdvancedFeaturePaths() throws Exception {
XMLInputSource in = new XMLInputSource(
JUnitExtension.getFile("featurePathTests/FeaturePathTestTypeSystem.xml"));
TypeSystemDescription typeSystemDescription = UIMAFramework.getXMLParser()
.parseTypeSystemDescription(in);
CAS cas = CasCreationUtils.createCas(typeSystemDescription, null, null);
cas.setDocumentText("Sample Text");
// test feature path not set: a single unset reference still reports FS type info
String path = "/refFeature2";
FeaturePath featurePath = new FeaturePathImpl();
featurePath.initialize(path);
assertEquals(path, featurePath.getFeaturePath());
featurePath.typeInit(cas.getDocumentAnnotation().getType());
assertEquals(TypeClass.TYPE_CLASS_FS, featurePath.getTypeClass(cas.getDocumentAnnotation()));
assertEquals(cas.getDocumentAnnotation().getType(),
featurePath.getType(cas.getDocumentAnnotation()));
assertEquals(null, featurePath.getValueAsString(cas.getDocumentAnnotation()));
assertEquals(null, featurePath.getFSValue(cas.getDocumentAnnotation()));
assertEquals(null, featurePath.getValueAsString(null));
assertEquals(null, featurePath.getFSValue(null));
// test feature path not set: a two-hop path through an unset reference yields null everywhere
path = "/refFeature/refFeature";
featurePath = new FeaturePathImpl();
featurePath.initialize(path);
assertEquals(path, featurePath.getFeaturePath());
featurePath.typeInit(cas.getDocumentAnnotation().getType());
assertEquals(null, featurePath.getTypeClass(cas.getDocumentAnnotation()));
assertEquals(null, featurePath.getType(cas.getDocumentAnnotation()));
assertEquals(null, featurePath.getValueAsString(cas.getDocumentAnnotation()));
assertEquals(null, featurePath.getFSValue(cas.getDocumentAnnotation()));
// test reference feature path (slow lookup - path not always valid)
// refFeature points back at the document annotation itself, so the chain resolves cyclically
Feature stringFeat = cas.getDocumentAnnotation().getType()
.getFeatureByBaseName("stringFeature");
Feature refFeat = cas.getDocumentAnnotation().getType().getFeatureByBaseName("refFeature");
cas.getDocumentAnnotation().setStringValue(stringFeat, "MyExample");
cas.getDocumentAnnotation().setFeatureValue(refFeat, cas.getDocumentAnnotation());
path = "/refFeature/refFeature/stringFeature";
featurePath = new FeaturePathImpl();
featurePath.initialize(path);
assertEquals(path, featurePath.getFeaturePath());
featurePath.typeInit(cas.getDocumentAnnotation().getType());
assertEquals(TypeClass.TYPE_CLASS_STRING,
featurePath.getTypeClass(cas.getDocumentAnnotation()));
assertEquals(stringFeat.getRange(), featurePath.getType(cas.getDocumentAnnotation()));
// test reference feature path (fast lookup - path always valid)
Feature ref2Feat = cas.getDocumentAnnotation().getType().getFeatureByBaseName("refFeature2");
cas.getDocumentAnnotation().setFeatureValue(ref2Feat, cas.getDocumentAnnotation());
path = "/refFeature2/refFeature2/stringFeature";
featurePath = new FeaturePathImpl();
featurePath.initialize(path);
assertEquals(path, featurePath.getFeaturePath());
featurePath.typeInit(cas.getDocumentAnnotation().getType());
assertEquals(TypeClass.TYPE_CLASS_STRING,
featurePath.getTypeClass(cas.getDocumentAnnotation()));
assertEquals(stringFeat.getRange(), featurePath.getType(cas.getDocumentAnnotation()));
// test reference feature
path = "/refFeature2/refFeature2";
featurePath = new FeaturePathImpl();
featurePath.initialize(path);
assertEquals(path, featurePath.getFeaturePath());
featurePath.typeInit(cas.getDocumentAnnotation().getType());
assertEquals(TypeClass.TYPE_CLASS_FS, featurePath.getTypeClass(cas.getDocumentAnnotation()));
assertEquals(cas.getDocumentAnnotation().getType(),
featurePath.getType(cas.getDocumentAnnotation()));
// test empty featurePath: evaluates to the FS itself (its toString rendering)
featurePath = new FeaturePathImpl();
featurePath.initialize("");
assertEquals("", featurePath.getFeaturePath());
featurePath.typeInit(cas.getDocumentAnnotation().getType());
assertEquals(cas.getDocumentAnnotation().toString(),
featurePath.getValueAsString(cas.getDocumentAnnotation()));
// test "/" featurePath: same as the empty path, evaluates to the FS itself
featurePath = new FeaturePathImpl();
featurePath.initialize("/");
assertEquals("/", featurePath.getFeaturePath());
featurePath.typeInit(cas.getDocumentAnnotation().getType());
assertEquals(cas.getDocumentAnnotation().toString(),
featurePath.getValueAsString(cas.getDocumentAnnotation()));
// check init() with super type and call getValue() with subtype
// NOTE(review): only checks that the call does not throw; the returned value is not asserted
featurePath = new FeaturePathImpl();
featurePath.initialize("/stringFeature");
Type testAnnotType = cas.getTypeSystem().getType("uima.tt.TestAnnotation");
featurePath.typeInit(testAnnotType);
Type testAnnotSubType = cas.getTypeSystem().getType("uima.tt.TestAnnotSub");
AnnotationFS fs = cas.createAnnotation(testAnnotSubType, 0, 1);
cas.addFsToIndexes(fs);
featurePath.getValueAsString(fs);
}
/**
 * Tests the supported built-in functions for the feature path:
 * {@code fsId()}, {@code coveredText()} and {@code typeName()}, both after a
 * reference chain and applied directly to the root FS.
 */
@Test
void testBuiltInFeaturePathFunctions() throws Exception {
XMLInputSource in = new XMLInputSource(
JUnitExtension.getFile("featurePathTests/FeaturePathTestTypeSystem.xml"));
TypeSystemDescription typeSystemDescription = UIMAFramework.getXMLParser()
.parseTypeSystemDescription(in);
CAS cas = CasCreationUtils.createCas(typeSystemDescription, null, null);
cas.setDocumentText("Sample Text");
// refFeature is self-referential, so arbitrarily long /refFeature chains resolve
// back to the document annotation
Feature refFeat = cas.getDocumentAnnotation().getType().getFeatureByBaseName("refFeature");
cas.getDocumentAnnotation().setFeatureValue(refFeat, cas.getDocumentAnnotation());
// test fsId()
String docAnnotId = Integer.toString(cas.getDocumentAnnotation()._id());
String path = "/refFeature:fsId()";
FeaturePath featurePath = new FeaturePathImpl();
featurePath.initialize(path);
assertEquals(path, featurePath.getFeaturePath());
featurePath.typeInit(cas.getDocumentAnnotation().getType());
assertEquals(docAnnotId, featurePath.getValueAsString(cas.getDocumentAnnotation()));
// test fsId()
path = "/refFeature/refFeature/refFeature/refFeature:fsId()";
featurePath = new FeaturePathImpl();
featurePath.initialize(path);
assertEquals(path, featurePath.getFeaturePath());
featurePath.typeInit(cas.getDocumentAnnotation().getType());
assertEquals(docAnnotId, featurePath.getValueAsString(cas.getDocumentAnnotation()));
// test coveredText()
path = "/refFeature:coveredText()";
featurePath = new FeaturePathImpl();
featurePath.initialize(path);
assertEquals(path, featurePath.getFeaturePath());
featurePath.typeInit(cas.getDocumentAnnotation().getType());
assertEquals("Sample Text", featurePath.getValueAsString(cas.getDocumentAnnotation()));
// test coveredText()
path = "/refFeature/refFeature/refFeature/refFeature/refFeature/refFeature:coveredText()";
featurePath = new FeaturePathImpl();
featurePath.initialize(path);
assertEquals(path, featurePath.getFeaturePath());
featurePath.typeInit(cas.getDocumentAnnotation().getType());
assertEquals("Sample Text", featurePath.getValueAsString(cas.getDocumentAnnotation()));
// test typeName()
path = "/refFeature:typeName()";
featurePath = new FeaturePathImpl();
featurePath.initialize(path);
assertEquals(path, featurePath.getFeaturePath());
featurePath.typeInit(cas.getDocumentAnnotation().getType());
assertEquals("uima.tcas.DocumentAnnotation",
featurePath.getValueAsString(cas.getDocumentAnnotation()));
// test typeName()
path = "/refFeature/refFeature/refFeature/refFeature/refFeature/refFeature/refFeature:typeName()";
featurePath = new FeaturePathImpl();
featurePath.initialize(path);
assertEquals(path, featurePath.getFeaturePath());
featurePath.typeInit(cas.getDocumentAnnotation().getType());
assertEquals("uima.tcas.DocumentAnnotation",
featurePath.getValueAsString(cas.getDocumentAnnotation()));
// test typeName() on root (no leading "/")
path = ":typeName()";
featurePath = new FeaturePathImpl();
featurePath.initialize(path);
assertEquals(path, featurePath.getFeaturePath());
featurePath.typeInit(cas.getDocumentAnnotation().getType());
assertEquals("uima.tcas.DocumentAnnotation",
featurePath.getValueAsString(cas.getDocumentAnnotation()));
// test coveredText() on root, via both the regular and the low-level CAS API
LowLevelCAS llc = cas.getLowLevelCAS();
path = "/:coveredText()";
featurePath = new FeaturePathImpl();
featurePath.initialize(path);
assertEquals(path, featurePath.getFeaturePath());
featurePath.typeInit(cas.getDocumentAnnotation().getType());
assertEquals(cas.getDocumentText(), featurePath.getValueAsString(cas.getDocumentAnnotation()));
assertEquals(cas.getDocumentText(),
featurePath.ll_getValueAsString(llc.ll_getFSRef(cas.getDocumentAnnotation()), llc));
assertEquals(cas.getDocumentAnnotation().getType(),
featurePath.getType(cas.getDocumentAnnotation()));
assertEquals(TypeClass.TYPE_CLASS_FS, featurePath.getTypeClass(cas.getDocumentAnnotation()))
;
// test fsId() on root
path = "/:fsId()";
featurePath = new FeaturePathImpl();
featurePath.initialize(path);
assertEquals(path, featurePath.getFeaturePath());
featurePath.typeInit(cas.getDocumentAnnotation().getType());
assertEquals(docAnnotId, featurePath.getValueAsString(cas.getDocumentAnnotation()));
}
/**
 * Tests some error conditions for the feature path implementation:
 * invalid path syntax, unknown built-in functions, primitive features used
 * mid-path, undefined features, unsupported functions on primitive/array
 * values, and type mismatches between typeInit() and the evaluated FS.
 */
@Test
void testErrorCases() throws Exception {
XMLInputSource in = new XMLInputSource(
JUnitExtension.getFile("featurePathTests/FeaturePathTestTypeSystem.xml"));
TypeSystemDescription typeSystemDescription = UIMAFramework.getXMLParser()
.parseTypeSystemDescription(in);
CAS cas = CasCreationUtils.createCas(typeSystemDescription, null, null);
cas.setDocumentText("Sample Text");
Feature stringFeat = cas.getDocumentAnnotation().getType()
.getFeatureByBaseName("stringFeature");
Feature refFeat = cas.getDocumentAnnotation().getType().getFeatureByBaseName("refFeature");
cas.getDocumentAnnotation().setStringValue(stringFeat, "MyExample");
cas.getDocumentAnnotation().setFeatureValue(refFeat, cas.getDocumentAnnotation());
// NOTE(review): each try/catch below asserts on the exception message but passes
// silently if NO exception is thrown. Consider adding fail() after the throwing
// call (or using assertThrows) — TODO confirm each case actually throws in the
// current implementation before tightening, since some blocks (e.g. the
// commented-out typeSystemInit ones) may be intentionally lenient.
// test featurePath = null
FeaturePath featurePath = new FeaturePathImpl();
try {
featurePath.initialize(null);
} catch (CASException ex) {
assertTrue(ex.getMessage().indexOf("Invalid featurePath") > -1);
}
// test featurePath syntax error
featurePath = new FeaturePathImpl();
try {
featurePath.initialize("feature//path");
} catch (CASException ex) {
assertTrue(ex.getMessage().indexOf("//") > -1);
}
// test non supported built-in function
featurePath = new FeaturePathImpl();
try {
featurePath.initialize("feature/path:test()");
} catch (CASException ex) {
assertTrue(ex.getMessage().indexOf("test()") > -1);
}
// test featurePath contains primitive feature in path
featurePath = new FeaturePathImpl();
try {
featurePath.initialize("/refFeature/stringFeature/refFeature");
// featurePath.typeSystemInit(cas.getDocumentAnnotation().getType());
System.out.println(featurePath.getStringValue(cas.getDocumentAnnotation()));
} catch (CASRuntimeException ex) {
assertTrue(ex.getMessage().indexOf("stringFeature") > -1);
}
// test featurePath feature not defined
featurePath = new FeaturePathImpl();
try {
featurePath.initialize("/refFeature/refFeatureNotDef");
// featurePath.typeSystemInit(cas.getDocumentAnnotation().getType());
featurePath.getValueAsString(cas.getDocumentAnnotation());
} catch (CASRuntimeException ex) {
assertTrue(ex.getMessage().indexOf("refFeatureNotDef") > -1);
}
// test featurePath function not supported (coveredText() on a String value)
featurePath = new FeaturePathImpl();
try {
featurePath.initialize("/stringFeature:coveredText()");
featurePath.typeInit(cas.getDocumentAnnotation().getType());
featurePath.getValueAsString(cas.getDocumentAnnotation());
} catch (CASRuntimeException ex) {
assertTrue(ex.getMessage().indexOf("uima.cas.String") > -1);
}
// test featurePath function not supported (coveredText() on a Byte value)
featurePath = new FeaturePathImpl();
try {
featurePath.initialize("/byteFeature:coveredText()");
featurePath.typeInit(cas.getDocumentAnnotation().getType());
featurePath.getValueAsString(cas.getDocumentAnnotation());
} catch (CASRuntimeException ex) {
assertTrue(ex.getMessage().indexOf("uima.cas.Byte") > -1);
}
// test array featurePath (array feature used mid-path)
featurePath = new FeaturePathImpl();
try {
featurePath.initialize("/refFeature/fsArray/refFeature");
featurePath.typeInit(cas.getDocumentAnnotation().getType());
featurePath.getValueAsString(cas.getDocumentAnnotation());
} catch (CASException ex) {
assertTrue(ex.getMessage().indexOf("uima.tcas.DocumentAnnotation") > -1);
}
// try to add a feature to the feature path with a built-in function
featurePath = new FeaturePathImpl();
try {
featurePath.initialize("/refFeature:coveredText()");
featurePath.addFeature(refFeat);
} catch (CASRuntimeException ex) {
assertTrue(ex.getMessage().indexOf("refFeature") > -1);
}
// use featurePath object with an different type than used for typeInit()
// and the case that type used for typeInit() has and featurePath that is
// not always valid
featurePath = new FeaturePathImpl();
try {
featurePath.initialize("/refFeature/stringFeature");
featurePath.typeInit(cas.getDocumentAnnotation().getType());
Type testAnnotType = cas.getTypeSystem().getType("uima.tt.TestAnnotation");
AnnotationFS fs = cas.createAnnotation(testAnnotType, 0, 1);
cas.addFsToIndexes(fs);
featurePath.getValueAsString(fs);
} catch (CASRuntimeException ex) {
assertTrue(ex.getMessage().indexOf("uima.tt.TestAnnotation") > -1);
}
// use featurePath object with an different type than used for typeInit()
// and the case that type used for typeInit() has and featurePath that is
// always valid
featurePath = new FeaturePathImpl();
try {
featurePath.initialize("/stringFeature");
featurePath.typeInit(cas.getDocumentAnnotation().getType());
Type testAnnotType = cas.getTypeSystem().getType("uima.tt.TestAnnotation");
AnnotationFS fs = cas.createAnnotation(testAnnotType, 0, 1);
cas.addFsToIndexes(fs);
featurePath.getValueAsString(fs);
} catch (CASRuntimeException ex) {
assertTrue(ex.getMessage().indexOf("uima.tt.TestAnnotation") > -1);
}
// pass null as FS: all accessors must return null rather than throw
featurePath = new FeaturePathImpl();
featurePath.initialize("/refFeature:coveredText()");
featurePath.typeInit(cas.getDocumentAnnotation().getType());
assertEquals(null, featurePath.getValueAsString(null));
// NOTE(review): "getTypClass" (no "e") appears to be the actual spelling declared
// by the FeaturePath API, not a typo in this test — confirm against the interface.
assertEquals(null, featurePath.getTypClass(null));
assertEquals(null, featurePath.getType(null));
}
/**
 * Tests the addFeature() API: building a path purely via addFeature(), and
 * appending features after initialize()/typeInit() for both always-valid and
 * possibly-valid paths.
 */
@Test
void testAddAPI() throws Exception {
XMLInputSource in = new XMLInputSource(
JUnitExtension.getFile("featurePathTests/FeaturePathTestTypeSystem.xml"));
TypeSystemDescription typeSystemDescription = UIMAFramework.getXMLParser()
.parseTypeSystemDescription(in);
CAS cas = CasCreationUtils.createCas(typeSystemDescription, null, null);
cas.setDocumentText("Sample Text");
Feature stringFeat = cas.getDocumentAnnotation().getType()
.getFeatureByBaseName("stringFeature");
Feature refFeat = cas.getDocumentAnnotation().getType().getFeatureByBaseName("refFeature");
cas.getDocumentAnnotation().setStringValue(stringFeat, "MyExample");
cas.getDocumentAnnotation().setFeatureValue(refFeat, cas.getDocumentAnnotation());
// create featurePath with add() API; must be usable both before and after typeInit()
FeaturePath featurePath = new FeaturePathImpl();
featurePath.addFeature(refFeat);
featurePath.addFeature(stringFeat);
assertEquals("MyExample", featurePath.getValueAsString(cas.getDocumentAnnotation()));
assertEquals("/refFeature/stringFeature", featurePath.getFeaturePath());
assertTrue(featurePath.size() == 2);
assertTrue(featurePath.getFeature(1) == stringFeat);
featurePath.typeInit(cas.getDocumentAnnotation().getType());
assertEquals("MyExample", featurePath.getValueAsString(cas.getDocumentAnnotation()));
assertEquals("MyExample", featurePath.getStringValue(cas.getDocumentAnnotation()));
assertTrue(featurePath.size() == 2);
assertTrue(featurePath.getFeature(1) == stringFeat);
// test path always valid after addFeature()
// NOTE(review): the two cases below only verify that addFeature() does not throw
featurePath = new FeaturePathImpl();
featurePath.initialize("/refFeature2");
featurePath.typeInit(cas.getDocumentAnnotation().getType());
featurePath.addFeature(stringFeat);
// test path possible valid after addFeature()
featurePath = new FeaturePathImpl();
featurePath.initialize("/refFeature2");
featurePath.typeInit(cas.getDocumentAnnotation().getType());
featurePath.addFeature(refFeat);
featurePath.addFeature(stringFeat);
}
/**
 * Tests the addFeature() API together with initialize(): a path seeded via
 * initialize() and extended via addFeature() must evaluate like a fully
 * initialized path.
 */
@Test
void testInitializeWithAddAPI() throws Exception {
XMLInputSource in = new XMLInputSource(
JUnitExtension.getFile("featurePathTests/FeaturePathTestTypeSystem.xml"));
TypeSystemDescription typeSystemDescription = UIMAFramework.getXMLParser()
.parseTypeSystemDescription(in);
CAS cas = CasCreationUtils.createCas(typeSystemDescription, null, null);
cas.setDocumentText("Sample Text");
Feature stringFeat = cas.getDocumentAnnotation().getType()
.getFeatureByBaseName("stringFeature");
Feature refFeat = cas.getDocumentAnnotation().getType().getFeatureByBaseName("refFeature2");
cas.getDocumentAnnotation().setStringValue(stringFeat, "MyExample");
cas.getDocumentAnnotation().setFeatureValue(refFeat, cas.getDocumentAnnotation());
FeaturePath featurePath = new FeaturePathImpl();
featurePath.initialize("/refFeature2");
featurePath.addFeature(stringFeat);
assertEquals("MyExample", featurePath.getValueAsString(cas.getDocumentAnnotation()));
assertEquals("/refFeature2/stringFeature", featurePath.getFeaturePath());
assertTrue(featurePath.size() == 2);
// test case change: new impl sets features as paths are traversed;
assertTrue(featurePath.getFeature(1) == stringFeat);
featurePath.typeInit(cas.getDocumentAnnotation().getType());
assertEquals("MyExample", featurePath.getValueAsString(cas.getDocumentAnnotation()));
assertEquals("MyExample", featurePath.getStringValue(cas.getDocumentAnnotation()));
assertTrue(featurePath.size() == 2);
assertTrue(featurePath.getFeature(1) == stringFeat);
}
/*
 * Tests all array types: string, short, float, double, long, int, boolean,
 * byte and FS arrays, checking getFSValue(), the comma-joined
 * getValueAsString() rendering, getTypeClass() and getType().
 */
@Test
void testArrayTypes() throws Exception {
XMLInputSource in = new XMLInputSource(
JUnitExtension.getFile("featurePathTests/FeaturePathTestTypeSystem.xml"));
TypeSystemDescription typeSystemDescription = UIMAFramework.getXMLParser()
.parseTypeSystemDescription(in);
CAS cas = CasCreationUtils.createCas(typeSystemDescription, null, null);
cas.setDocumentText("Sample Text");
// test stringArray feature
// element 3 is deliberately left unset: the rendering below must show it as "null"
Feature stringArrayFeat = cas.getDocumentAnnotation().getType()
.getFeatureByBaseName("stringArray");
StringArrayFS stringArrayFS = cas.createStringArrayFS(4);
stringArrayFS.set(0, "Test0");
stringArrayFS.set(1, "Test1");
stringArrayFS.set(2, "Test2");
cas.getDocumentAnnotation().setFeatureValue(stringArrayFeat, stringArrayFS);
String path = "/stringArray";
FeaturePath featurePath = new FeaturePathImpl();
featurePath.initialize(path);
featurePath.typeInit(cas.getDocumentAnnotation().getType());
assertEquals(stringArrayFS, featurePath.getFSValue(cas.getDocumentAnnotation()));
assertEquals("Test0,Test1,Test2,null",
featurePath.getValueAsString(cas.getDocumentAnnotation()));
assertEquals(TypeClass.TYPE_CLASS_STRINGARRAY,
featurePath.getTypeClass(cas.getDocumentAnnotation()));
assertEquals(stringArrayFeat.getRange(), featurePath.getType(cas.getDocumentAnnotation()));
// test shortArray feature
Feature shortArrayFeat = cas.getDocumentAnnotation().getType()
.getFeatureByBaseName("shortArray");
ShortArrayFS shortArrayFS = cas.createShortArrayFS(3);
shortArrayFS.set(0, (short) 0);
shortArrayFS.set(1, (short) 2);
shortArrayFS.set(2, (short) 54);
cas.getDocumentAnnotation().setFeatureValue(shortArrayFeat, shortArrayFS);
path = "/shortArray";
featurePath = new FeaturePathImpl();
featurePath.initialize(path);
featurePath.typeInit(cas.getDocumentAnnotation().getType());
assertEquals(shortArrayFS, featurePath.getFSValue(cas.getDocumentAnnotation()));
assertEquals("0,2,54", featurePath.getValueAsString(cas.getDocumentAnnotation()));
assertEquals(TypeClass.TYPE_CLASS_SHORTARRAY,
featurePath.getTypeClass(cas.getDocumentAnnotation()));
assertEquals(shortArrayFeat.getRange(), featurePath.getType(cas.getDocumentAnnotation()));
// test floatArray feature
Feature floatArrayFeat = cas.getDocumentAnnotation().getType()
.getFeatureByBaseName("floatArray");
FloatArrayFS floatArrayFS = cas.createFloatArrayFS(3);
floatArrayFS.set(0, 1.4f);
floatArrayFS.set(1, 0f);
floatArrayFS.set(2, 3434.34f);
cas.getDocumentAnnotation().setFeatureValue(floatArrayFeat, floatArrayFS);
path = "/floatArray";
featurePath = new FeaturePathImpl();
featurePath.initialize(path);
featurePath.typeInit(cas.getDocumentAnnotation().getType());
assertEquals(floatArrayFS, featurePath.getFSValue(cas.getDocumentAnnotation()));
assertEquals("1.4,0.0,3434.34", featurePath.getValueAsString(cas.getDocumentAnnotation()));
assertEquals(TypeClass.TYPE_CLASS_FLOATARRAY,
featurePath.getTypeClass(cas.getDocumentAnnotation()));
assertEquals(floatArrayFeat.getRange(), featurePath.getType(cas.getDocumentAnnotation()));
// test doubleArray feature
Feature doubleArrayFeat = cas.getDocumentAnnotation().getType()
.getFeatureByBaseName("doubleArray");
DoubleArrayFS doubleArrayFS = cas.createDoubleArrayFS(3);
doubleArrayFS.set(0, 1.4);
doubleArrayFS.set(1, 0);
doubleArrayFS.set(2, 3434.34);
cas.getDocumentAnnotation().setFeatureValue(doubleArrayFeat, doubleArrayFS);
path = "/doubleArray";
featurePath = new FeaturePathImpl();
featurePath.initialize(path);
featurePath.typeInit(cas.getDocumentAnnotation().getType());
assertEquals(doubleArrayFS, featurePath.getFSValue(cas.getDocumentAnnotation()));
assertEquals("1.4,0.0,3434.34", featurePath.getValueAsString(cas.getDocumentAnnotation()));
assertEquals(TypeClass.TYPE_CLASS_DOUBLEARRAY,
featurePath.getTypeClass(cas.getDocumentAnnotation()));
assertEquals(doubleArrayFeat.getRange(), featurePath.getType(cas.getDocumentAnnotation()));
// test longArray feature
Feature longArrayFeat = cas.getDocumentAnnotation().getType().getFeatureByBaseName("longArray");
LongArrayFS longArrayFS = cas.createLongArrayFS(3);
longArrayFS.set(0, 14);
longArrayFS.set(1, 0);
longArrayFS.set(2, 343434);
cas.getDocumentAnnotation().setFeatureValue(longArrayFeat, longArrayFS);
path = "/longArray";
featurePath = new FeaturePathImpl();
featurePath.initialize(path);
featurePath.typeInit(cas.getDocumentAnnotation().getType());
assertEquals(longArrayFS, featurePath.getFSValue(cas.getDocumentAnnotation()));
assertEquals("14,0,343434", featurePath.getValueAsString(cas.getDocumentAnnotation()));
assertEquals(TypeClass.TYPE_CLASS_LONGARRAY,
featurePath.getTypeClass(cas.getDocumentAnnotation()));
assertEquals(longArrayFeat.getRange(), featurePath.getType(cas.getDocumentAnnotation()));
// test intArray feature
Feature intArrayFeat = cas.getDocumentAnnotation().getType().getFeatureByBaseName("intArray");
IntArrayFS intArrayFS = cas.createIntArrayFS(3);
intArrayFS.set(0, 14);
intArrayFS.set(1, 0);
intArrayFS.set(2, 343);
cas.getDocumentAnnotation().setFeatureValue(intArrayFeat, intArrayFS);
path = "/intArray";
featurePath = new FeaturePathImpl();
featurePath.initialize(path);
featurePath.typeInit(cas.getDocumentAnnotation().getType());
assertEquals(intArrayFS, featurePath.getFSValue(cas.getDocumentAnnotation()));
assertEquals("14,0,343", featurePath.getValueAsString(cas.getDocumentAnnotation()));
assertEquals(TypeClass.TYPE_CLASS_INTARRAY,
featurePath.getTypeClass(cas.getDocumentAnnotation()));
assertEquals(intArrayFeat.getRange(), featurePath.getType(cas.getDocumentAnnotation()));
// test booleanArray feature
Feature booleanArrayFeat = cas.getDocumentAnnotation().getType()
.getFeatureByBaseName("booleanArray");
BooleanArrayFS booleanArrayFS = cas.createBooleanArrayFS(3);
booleanArrayFS.set(0, true);
booleanArrayFS.set(1, false);
booleanArrayFS.set(2, true);
cas.getDocumentAnnotation().setFeatureValue(booleanArrayFeat, booleanArrayFS);
path = "/booleanArray";
featurePath = new FeaturePathImpl();
featurePath.initialize(path);
featurePath.typeInit(cas.getDocumentAnnotation().getType());
assertEquals(booleanArrayFS, featurePath.getFSValue(cas.getDocumentAnnotation()));
assertEquals("true,false,true", featurePath.getValueAsString(cas.getDocumentAnnotation()));
assertEquals(TypeClass.TYPE_CLASS_BOOLEANARRAY,
featurePath.getTypeClass(cas.getDocumentAnnotation()));
assertEquals(booleanArrayFeat.getRange(), featurePath.getType(cas.getDocumentAnnotation()));
// test byteArray feature
Feature byteArrayFeat = cas.getDocumentAnnotation().getType().getFeatureByBaseName("byteArray");
ByteArrayFS byteArrayFS = cas.createByteArrayFS(3);
byteArrayFS.set(0, (byte) 23);
byteArrayFS.set(1, (byte) 47);
byteArrayFS.set(2, (byte) 11);
cas.getDocumentAnnotation().setFeatureValue(byteArrayFeat, byteArrayFS);
path = "/byteArray";
featurePath = new FeaturePathImpl();
featurePath.initialize(path);
featurePath.typeInit(cas.getDocumentAnnotation().getType());
assertEquals(byteArrayFS, featurePath.getFSValue(cas.getDocumentAnnotation()));
assertEquals("23,47,11", featurePath.getValueAsString(cas.getDocumentAnnotation()));
assertEquals(TypeClass.TYPE_CLASS_BYTEARRAY,
featurePath.getTypeClass(cas.getDocumentAnnotation()));
assertEquals(byteArrayFeat.getRange(), featurePath.getType(cas.getDocumentAnnotation()));
// test fsArray feature
Feature fsArrayFeat = cas.getDocumentAnnotation().getType().getFeatureByBaseName("fsArray");
ArrayFS fsArrayFS = cas.createArrayFS(2);
fsArrayFS.set(0, cas.getDocumentAnnotation());
fsArrayFS.set(1, cas.getDocumentAnnotation());
// NOTE(review): return value discarded — presumably only exercises toStringArray()
// for coverage; confirm intent.
fsArrayFS.toStringArray();
cas.getDocumentAnnotation().setFeatureValue(fsArrayFeat, fsArrayFS);
path = "/fsArray";
featurePath = new FeaturePathImpl();
featurePath.initialize(path);
featurePath.typeInit(cas.getDocumentAnnotation().getType());
assertEquals(fsArrayFS, featurePath.getFSValue(cas.getDocumentAnnotation()));
// NOTE(review): "11" presumably originates from the byteArray value 11 set above
// appearing in the document annotation's string rendering — verify before relying
// on this substring check.
assertTrue(featurePath.getValueAsString(cas.getDocumentAnnotation()).indexOf("11") > 0);
assertEquals(TypeClass.TYPE_CLASS_FSARRAY,
featurePath.getTypeClass(cas.getDocumentAnnotation()));
assertEquals(fsArrayFeat.getRange(), featurePath.getType(cas.getDocumentAnnotation()));
}
}
|
googleapis/google-cloud-java | 37,713 | java-dialogflow/proto-google-cloud-dialogflow-v2/src/main/java/com/google/cloud/dialogflow/v2/ListMessagesRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/dialogflow/v2/conversation.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.dialogflow.v2;
/**
*
*
* <pre>
* The request message for
* [Conversations.ListMessages][google.cloud.dialogflow.v2.Conversations.ListMessages].
* </pre>
*
* Protobuf type {@code google.cloud.dialogflow.v2.ListMessagesRequest}
*/
// NOTE(review): protoc-generated message class (file header says "DO NOT EDIT").
// Comments below only annotate the generated structure for readers; the code is untouched
// and any real change belongs in google/cloud/dialogflow/v2/conversation.proto.
public final class ListMessagesRequest extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.dialogflow.v2.ListMessagesRequest)
    ListMessagesRequestOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use ListMessagesRequest.newBuilder() to construct.
  private ListMessagesRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // Default constructor used for the singleton DEFAULT_INSTANCE; string fields start empty.
  private ListMessagesRequest() {
    parent_ = "";
    filter_ = "";
    pageToken_ = "";
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ListMessagesRequest();
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.dialogflow.v2.ConversationProto
        .internal_static_google_cloud_dialogflow_v2_ListMessagesRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.dialogflow.v2.ConversationProto
        .internal_static_google_cloud_dialogflow_v2_ListMessagesRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.dialogflow.v2.ListMessagesRequest.class,
            com.google.cloud.dialogflow.v2.ListMessagesRequest.Builder.class);
  }

  public static final int PARENT_FIELD_NUMBER = 1;

  // Holds either a java.lang.String or a ByteString; the String form is lazily
  // decoded from UTF-8 and cached on first access (see getParent()).
  @SuppressWarnings("serial")
  private volatile java.lang.Object parent_ = "";

  /**
   *
   *
   * <pre>
   * Required. The name of the conversation to list messages for.
   * Format: `projects/<Project ID>/locations/<Location
   * ID>/conversations/<Conversation ID>`
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The parent.
   */
  @java.lang.Override
  public java.lang.String getParent() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Decode once from the wire representation and cache the String.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      parent_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * Required. The name of the conversation to list messages for.
   * Format: `projects/<Project ID>/locations/<Location
   * ID>/conversations/<Conversation ID>`
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The bytes for parent.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getParentBytes() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      // Encode once to UTF-8 bytes and cache the ByteString.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      parent_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int FILTER_FIELD_NUMBER = 4;

  // Lazily-decoded String/ByteString holder, same pattern as parent_.
  @SuppressWarnings("serial")
  private volatile java.lang.Object filter_ = "";

  /**
   *
   *
   * <pre>
   * Optional. Filter on message fields. Currently predicates on `create_time`
   * and `create_time_epoch_microseconds` are supported. `create_time` only
   * support milliseconds accuracy. E.g.,
   * `create_time_epoch_microseconds > 1551790877964485` or
   * `create_time > 2017-01-15T01:30:15.01Z`.
   *
   * For more information about filtering, see
   * [API Filtering](https://aip.dev/160).
   * </pre>
   *
   * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The filter.
   */
  @java.lang.Override
  public java.lang.String getFilter() {
    java.lang.Object ref = filter_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      filter_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * Optional. Filter on message fields. Currently predicates on `create_time`
   * and `create_time_epoch_microseconds` are supported. `create_time` only
   * support milliseconds accuracy. E.g.,
   * `create_time_epoch_microseconds > 1551790877964485` or
   * `create_time > 2017-01-15T01:30:15.01Z`.
   *
   * For more information about filtering, see
   * [API Filtering](https://aip.dev/160).
   * </pre>
   *
   * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The bytes for filter.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getFilterBytes() {
    java.lang.Object ref = filter_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      filter_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int PAGE_SIZE_FIELD_NUMBER = 2;
  private int pageSize_ = 0;

  /**
   *
   *
   * <pre>
   * Optional. The maximum number of items to return in a single page. By
   * default 100 and at most 1000.
   * </pre>
   *
   * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The pageSize.
   */
  @java.lang.Override
  public int getPageSize() {
    return pageSize_;
  }

  public static final int PAGE_TOKEN_FIELD_NUMBER = 3;

  // Lazily-decoded String/ByteString holder, same pattern as parent_.
  @SuppressWarnings("serial")
  private volatile java.lang.Object pageToken_ = "";

  /**
   *
   *
   * <pre>
   * Optional. The next_page_token value returned from a previous list request.
   * </pre>
   *
   * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The pageToken.
   */
  @java.lang.Override
  public java.lang.String getPageToken() {
    java.lang.Object ref = pageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      pageToken_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * Optional. The next_page_token value returned from a previous list request.
   * </pre>
   *
   * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The bytes for pageToken.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getPageTokenBytes() {
    java.lang.Object ref = pageToken_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      pageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  // Cached isInitialized() result: -1 = not yet computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // This message has no required fields, so it is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }

  // Serializes set (non-default) fields in ascending field-number order: 1, 2, 3, 4.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
    }
    if (pageSize_ != 0) {
      output.writeInt32(2, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 4, filter_);
    }
    getUnknownFields().writeTo(output);
  }

  // Computes the wire size once and caches it in memoizedSize (-1 = not computed).
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
    }
    if (pageSize_ != 0) {
      size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, filter_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  // Value equality over all fields plus unknown fields (consistent with hashCode below).
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.dialogflow.v2.ListMessagesRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.dialogflow.v2.ListMessagesRequest other =
        (com.google.cloud.dialogflow.v2.ListMessagesRequest) obj;
    if (!getParent().equals(other.getParent())) return false;
    if (!getFilter().equals(other.getFilter())) return false;
    if (getPageSize() != other.getPageSize()) return false;
    if (!getPageToken().equals(other.getPageToken())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  // Hash over descriptor and all fields; cached in memoizedHashCode (0 = not computed).
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    hash = (37 * hash) + FILTER_FIELD_NUMBER;
    hash = (53 * hash) + getFilter().hashCode();
    hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
    hash = (53 * hash) + getPageSize();
    hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  // ---- Standard generated parseFrom overloads; all delegate to PARSER. ----

  public static com.google.cloud.dialogflow.v2.ListMessagesRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.dialogflow.v2.ListMessagesRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.dialogflow.v2.ListMessagesRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.dialogflow.v2.ListMessagesRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.dialogflow.v2.ListMessagesRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.dialogflow.v2.ListMessagesRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.dialogflow.v2.ListMessagesRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.dialogflow.v2.ListMessagesRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.dialogflow.v2.ListMessagesRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.dialogflow.v2.ListMessagesRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.dialogflow.v2.ListMessagesRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.dialogflow.v2.ListMessagesRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(com.google.cloud.dialogflow.v2.ListMessagesRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }

  /**
   *
   *
   * <pre>
   * The request message for
   * [Conversations.ListMessages][google.cloud.dialogflow.v2.Conversations.ListMessages].
   * </pre>
   *
   * Protobuf type {@code google.cloud.dialogflow.v2.ListMessagesRequest}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.dialogflow.v2.ListMessagesRequest)
      com.google.cloud.dialogflow.v2.ListMessagesRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.dialogflow.v2.ConversationProto
          .internal_static_google_cloud_dialogflow_v2_ListMessagesRequest_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.dialogflow.v2.ConversationProto
          .internal_static_google_cloud_dialogflow_v2_ListMessagesRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.dialogflow.v2.ListMessagesRequest.class,
              com.google.cloud.dialogflow.v2.ListMessagesRequest.Builder.class);
    }

    // Construct using com.google.cloud.dialogflow.v2.ListMessagesRequest.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    // Resets all fields to defaults and clears the set-field bitmask.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      parent_ = "";
      filter_ = "";
      pageSize_ = 0;
      pageToken_ = "";
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.dialogflow.v2.ConversationProto
          .internal_static_google_cloud_dialogflow_v2_ListMessagesRequest_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.dialogflow.v2.ListMessagesRequest getDefaultInstanceForType() {
      return com.google.cloud.dialogflow.v2.ListMessagesRequest.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.dialogflow.v2.ListMessagesRequest build() {
      com.google.cloud.dialogflow.v2.ListMessagesRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.dialogflow.v2.ListMessagesRequest buildPartial() {
      com.google.cloud.dialogflow.v2.ListMessagesRequest result =
          new com.google.cloud.dialogflow.v2.ListMessagesRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies only the fields whose bit is set in bitField0_ into the message.
    private void buildPartial0(com.google.cloud.dialogflow.v2.ListMessagesRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.parent_ = parent_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.filter_ = filter_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.pageSize_ = pageSize_;
      }
      if (((from_bitField0_ & 0x00000008) != 0)) {
        result.pageToken_ = pageToken_;
      }
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.dialogflow.v2.ListMessagesRequest) {
        return mergeFrom((com.google.cloud.dialogflow.v2.ListMessagesRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Merges non-default fields from `other`; fields left at their defaults are ignored.
    public Builder mergeFrom(com.google.cloud.dialogflow.v2.ListMessagesRequest other) {
      if (other == com.google.cloud.dialogflow.v2.ListMessagesRequest.getDefaultInstance())
        return this;
      if (!other.getParent().isEmpty()) {
        parent_ = other.parent_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (!other.getFilter().isEmpty()) {
        filter_ = other.filter_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (other.getPageSize() != 0) {
        setPageSize(other.getPageSize());
      }
      if (!other.getPageToken().isEmpty()) {
        pageToken_ = other.pageToken_;
        bitField0_ |= 0x00000008;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    // Wire-format parse loop. Each case is a field tag:
    //   10 = field 1 (parent, length-delimited), 16 = field 2 (page_size, varint),
    //   26 = field 3 (page_token, length-delimited), 34 = field 4 (filter, length-delimited).
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                parent_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 16:
              {
                pageSize_ = input.readInt32();
                bitField0_ |= 0x00000004;
                break;
              } // case 16
            case 26:
              {
                pageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000008;
                break;
              } // case 26
            case 34:
              {
                filter_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 34
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    // Bitmask of explicitly-set fields: 0x1 parent, 0x2 filter, 0x4 page_size, 0x8 page_token.
    private int bitField0_;

    private java.lang.Object parent_ = "";

    /**
     *
     *
     * <pre>
     * Required. The name of the conversation to list messages for.
     * Format: `projects/<Project ID>/locations/<Location
     * ID>/conversations/<Conversation ID>`
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The parent.
     */
    public java.lang.String getParent() {
      java.lang.Object ref = parent_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        parent_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Required. The name of the conversation to list messages for.
     * Format: `projects/<Project ID>/locations/<Location
     * ID>/conversations/<Conversation ID>`
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The bytes for parent.
     */
    public com.google.protobuf.ByteString getParentBytes() {
      java.lang.Object ref = parent_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        parent_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Required. The name of the conversation to list messages for.
     * Format: `projects/<Project ID>/locations/<Location
     * ID>/conversations/<Conversation ID>`
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The parent to set.
     * @return This builder for chaining.
     */
    public Builder setParent(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. The name of the conversation to list messages for.
     * Format: `projects/<Project ID>/locations/<Location
     * ID>/conversations/<Conversation ID>`
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearParent() {
      parent_ = getDefaultInstance().getParent();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. The name of the conversation to list messages for.
     * Format: `projects/<Project ID>/locations/<Location
     * ID>/conversations/<Conversation ID>`
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The bytes for parent to set.
     * @return This builder for chaining.
     */
    public Builder setParentBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    private java.lang.Object filter_ = "";

    /**
     *
     *
     * <pre>
     * Optional. Filter on message fields. Currently predicates on `create_time`
     * and `create_time_epoch_microseconds` are supported. `create_time` only
     * support milliseconds accuracy. E.g.,
     * `create_time_epoch_microseconds > 1551790877964485` or
     * `create_time > 2017-01-15T01:30:15.01Z`.
     *
     * For more information about filtering, see
     * [API Filtering](https://aip.dev/160).
     * </pre>
     *
     * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The filter.
     */
    public java.lang.String getFilter() {
      java.lang.Object ref = filter_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        filter_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Optional. Filter on message fields. Currently predicates on `create_time`
     * and `create_time_epoch_microseconds` are supported. `create_time` only
     * support milliseconds accuracy. E.g.,
     * `create_time_epoch_microseconds > 1551790877964485` or
     * `create_time > 2017-01-15T01:30:15.01Z`.
     *
     * For more information about filtering, see
     * [API Filtering](https://aip.dev/160).
     * </pre>
     *
     * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The bytes for filter.
     */
    public com.google.protobuf.ByteString getFilterBytes() {
      java.lang.Object ref = filter_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        filter_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Optional. Filter on message fields. Currently predicates on `create_time`
     * and `create_time_epoch_microseconds` are supported. `create_time` only
     * support milliseconds accuracy. E.g.,
     * `create_time_epoch_microseconds > 1551790877964485` or
     * `create_time > 2017-01-15T01:30:15.01Z`.
     *
     * For more information about filtering, see
     * [API Filtering](https://aip.dev/160).
     * </pre>
     *
     * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The filter to set.
     * @return This builder for chaining.
     */
    public Builder setFilter(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      filter_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Optional. Filter on message fields. Currently predicates on `create_time`
     * and `create_time_epoch_microseconds` are supported. `create_time` only
     * support milliseconds accuracy. E.g.,
     * `create_time_epoch_microseconds > 1551790877964485` or
     * `create_time > 2017-01-15T01:30:15.01Z`.
     *
     * For more information about filtering, see
     * [API Filtering](https://aip.dev/160).
     * </pre>
     *
     * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearFilter() {
      filter_ = getDefaultInstance().getFilter();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Optional. Filter on message fields. Currently predicates on `create_time`
     * and `create_time_epoch_microseconds` are supported. `create_time` only
     * support milliseconds accuracy. E.g.,
     * `create_time_epoch_microseconds > 1551790877964485` or
     * `create_time > 2017-01-15T01:30:15.01Z`.
     *
     * For more information about filtering, see
     * [API Filtering](https://aip.dev/160).
     * </pre>
     *
     * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The bytes for filter to set.
     * @return This builder for chaining.
     */
    public Builder setFilterBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      filter_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    private int pageSize_;

    /**
     *
     *
     * <pre>
     * Optional. The maximum number of items to return in a single page. By
     * default 100 and at most 1000.
     * </pre>
     *
     * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The pageSize.
     */
    @java.lang.Override
    public int getPageSize() {
      return pageSize_;
    }

    /**
     *
     *
     * <pre>
     * Optional. The maximum number of items to return in a single page. By
     * default 100 and at most 1000.
     * </pre>
     *
     * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The pageSize to set.
     * @return This builder for chaining.
     */
    public Builder setPageSize(int value) {
      pageSize_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Optional. The maximum number of items to return in a single page. By
     * default 100 and at most 1000.
     * </pre>
     *
     * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearPageSize() {
      bitField0_ = (bitField0_ & ~0x00000004);
      pageSize_ = 0;
      onChanged();
      return this;
    }

    private java.lang.Object pageToken_ = "";

    /**
     *
     *
     * <pre>
     * Optional. The next_page_token value returned from a previous list request.
     * </pre>
     *
     * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The pageToken.
     */
    public java.lang.String getPageToken() {
      java.lang.Object ref = pageToken_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        pageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Optional. The next_page_token value returned from a previous list request.
     * </pre>
     *
     * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The bytes for pageToken.
     */
    public com.google.protobuf.ByteString getPageTokenBytes() {
      java.lang.Object ref = pageToken_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        pageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Optional. The next_page_token value returned from a previous list request.
     * </pre>
     *
     * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The pageToken to set.
     * @return This builder for chaining.
     */
    public Builder setPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      pageToken_ = value;
      bitField0_ |= 0x00000008;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Optional. The next_page_token value returned from a previous list request.
     * </pre>
     *
     * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearPageToken() {
      pageToken_ = getDefaultInstance().getPageToken();
      bitField0_ = (bitField0_ & ~0x00000008);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Optional. The next_page_token value returned from a previous list request.
     * </pre>
     *
     * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The bytes for pageToken to set.
     * @return This builder for chaining.
     */
    public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      pageToken_ = value;
      bitField0_ |= 0x00000008;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.dialogflow.v2.ListMessagesRequest)
  }

  // @@protoc_insertion_point(class_scope:google.cloud.dialogflow.v2.ListMessagesRequest)
  private static final com.google.cloud.dialogflow.v2.ListMessagesRequest DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.dialogflow.v2.ListMessagesRequest();
  }

  public static com.google.cloud.dialogflow.v2.ListMessagesRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Parser used by all parseFrom overloads; returns a partially-built message on failure
  // so callers can inspect what was parsed before the error.
  private static final com.google.protobuf.Parser<ListMessagesRequest> PARSER =
      new com.google.protobuf.AbstractParser<ListMessagesRequest>() {
        @java.lang.Override
        public ListMessagesRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<ListMessagesRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<ListMessagesRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.dialogflow.v2.ListMessagesRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 37,762 | java-compute/proto-google-cloud-compute-v1/src/main/java/com/google/cloud/compute/v1/SetIamPolicySnapshotRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/compute/v1/compute.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.compute.v1;
/**
*
*
* <pre>
* A request message for Snapshots.SetIamPolicy. See the method description for details.
* </pre>
*
* Protobuf type {@code google.cloud.compute.v1.SetIamPolicySnapshotRequest}
*/
public final class SetIamPolicySnapshotRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.compute.v1.SetIamPolicySnapshotRequest)
SetIamPolicySnapshotRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use SetIamPolicySnapshotRequest.newBuilder() to construct.
private SetIamPolicySnapshotRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private SetIamPolicySnapshotRequest() {
project_ = "";
resource_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new SetIamPolicySnapshotRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_SetIamPolicySnapshotRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_SetIamPolicySnapshotRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.compute.v1.SetIamPolicySnapshotRequest.class,
com.google.cloud.compute.v1.SetIamPolicySnapshotRequest.Builder.class);
}
private int bitField0_;
public static final int GLOBAL_SET_POLICY_REQUEST_RESOURCE_FIELD_NUMBER = 337048498;
private com.google.cloud.compute.v1.GlobalSetPolicyRequest globalSetPolicyRequestResource_;
/**
*
*
* <pre>
* The body resource for this request
* </pre>
*
* <code>
* .google.cloud.compute.v1.GlobalSetPolicyRequest global_set_policy_request_resource = 337048498 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the globalSetPolicyRequestResource field is set.
*/
@java.lang.Override
public boolean hasGlobalSetPolicyRequestResource() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* The body resource for this request
* </pre>
*
* <code>
* .google.cloud.compute.v1.GlobalSetPolicyRequest global_set_policy_request_resource = 337048498 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The globalSetPolicyRequestResource.
*/
@java.lang.Override
public com.google.cloud.compute.v1.GlobalSetPolicyRequest getGlobalSetPolicyRequestResource() {
return globalSetPolicyRequestResource_ == null
? com.google.cloud.compute.v1.GlobalSetPolicyRequest.getDefaultInstance()
: globalSetPolicyRequestResource_;
}
/**
*
*
* <pre>
* The body resource for this request
* </pre>
*
* <code>
* .google.cloud.compute.v1.GlobalSetPolicyRequest global_set_policy_request_resource = 337048498 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.cloud.compute.v1.GlobalSetPolicyRequestOrBuilder
getGlobalSetPolicyRequestResourceOrBuilder() {
return globalSetPolicyRequestResource_ == null
? com.google.cloud.compute.v1.GlobalSetPolicyRequest.getDefaultInstance()
: globalSetPolicyRequestResource_;
}
public static final int PROJECT_FIELD_NUMBER = 227560217;
@SuppressWarnings("serial")
private volatile java.lang.Object project_ = "";
/**
*
*
* <pre>
* Project ID for this request.
* </pre>
*
* <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The project.
*/
@java.lang.Override
public java.lang.String getProject() {
java.lang.Object ref = project_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
project_ = s;
return s;
}
}
/**
*
*
* <pre>
* Project ID for this request.
* </pre>
*
* <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for project.
*/
@java.lang.Override
public com.google.protobuf.ByteString getProjectBytes() {
java.lang.Object ref = project_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
project_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int RESOURCE_FIELD_NUMBER = 195806222;
@SuppressWarnings("serial")
private volatile java.lang.Object resource_ = "";
/**
*
*
* <pre>
* Name or id of the resource for this request.
* </pre>
*
* <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The resource.
*/
@java.lang.Override
public java.lang.String getResource() {
java.lang.Object ref = resource_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
resource_ = s;
return s;
}
}
/**
*
*
* <pre>
* Name or id of the resource for this request.
* </pre>
*
* <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for resource.
*/
@java.lang.Override
public com.google.protobuf.ByteString getResourceBytes() {
java.lang.Object ref = resource_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
resource_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resource_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 195806222, resource_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(project_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 227560217, project_);
}
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(337048498, getGlobalSetPolicyRequestResource());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resource_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(195806222, resource_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(project_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(227560217, project_);
}
if (((bitField0_ & 0x00000001) != 0)) {
size +=
com.google.protobuf.CodedOutputStream.computeMessageSize(
337048498, getGlobalSetPolicyRequestResource());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.compute.v1.SetIamPolicySnapshotRequest)) {
return super.equals(obj);
}
com.google.cloud.compute.v1.SetIamPolicySnapshotRequest other =
(com.google.cloud.compute.v1.SetIamPolicySnapshotRequest) obj;
if (hasGlobalSetPolicyRequestResource() != other.hasGlobalSetPolicyRequestResource())
return false;
if (hasGlobalSetPolicyRequestResource()) {
if (!getGlobalSetPolicyRequestResource().equals(other.getGlobalSetPolicyRequestResource()))
return false;
}
if (!getProject().equals(other.getProject())) return false;
if (!getResource().equals(other.getResource())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasGlobalSetPolicyRequestResource()) {
hash = (37 * hash) + GLOBAL_SET_POLICY_REQUEST_RESOURCE_FIELD_NUMBER;
hash = (53 * hash) + getGlobalSetPolicyRequestResource().hashCode();
}
hash = (37 * hash) + PROJECT_FIELD_NUMBER;
hash = (53 * hash) + getProject().hashCode();
hash = (37 * hash) + RESOURCE_FIELD_NUMBER;
hash = (53 * hash) + getResource().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.compute.v1.SetIamPolicySnapshotRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.compute.v1.SetIamPolicySnapshotRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.compute.v1.SetIamPolicySnapshotRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.compute.v1.SetIamPolicySnapshotRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.compute.v1.SetIamPolicySnapshotRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.compute.v1.SetIamPolicySnapshotRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.compute.v1.SetIamPolicySnapshotRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.compute.v1.SetIamPolicySnapshotRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.compute.v1.SetIamPolicySnapshotRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.compute.v1.SetIamPolicySnapshotRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.compute.v1.SetIamPolicySnapshotRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.compute.v1.SetIamPolicySnapshotRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.compute.v1.SetIamPolicySnapshotRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* A request message for Snapshots.SetIamPolicy. See the method description for details.
* </pre>
*
* Protobuf type {@code google.cloud.compute.v1.SetIamPolicySnapshotRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.compute.v1.SetIamPolicySnapshotRequest)
com.google.cloud.compute.v1.SetIamPolicySnapshotRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_SetIamPolicySnapshotRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_SetIamPolicySnapshotRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.compute.v1.SetIamPolicySnapshotRequest.class,
com.google.cloud.compute.v1.SetIamPolicySnapshotRequest.Builder.class);
}
// Construct using com.google.cloud.compute.v1.SetIamPolicySnapshotRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getGlobalSetPolicyRequestResourceFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
globalSetPolicyRequestResource_ = null;
if (globalSetPolicyRequestResourceBuilder_ != null) {
globalSetPolicyRequestResourceBuilder_.dispose();
globalSetPolicyRequestResourceBuilder_ = null;
}
project_ = "";
resource_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_SetIamPolicySnapshotRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.compute.v1.SetIamPolicySnapshotRequest getDefaultInstanceForType() {
return com.google.cloud.compute.v1.SetIamPolicySnapshotRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.compute.v1.SetIamPolicySnapshotRequest build() {
com.google.cloud.compute.v1.SetIamPolicySnapshotRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.compute.v1.SetIamPolicySnapshotRequest buildPartial() {
com.google.cloud.compute.v1.SetIamPolicySnapshotRequest result =
new com.google.cloud.compute.v1.SetIamPolicySnapshotRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.compute.v1.SetIamPolicySnapshotRequest result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.globalSetPolicyRequestResource_ =
globalSetPolicyRequestResourceBuilder_ == null
? globalSetPolicyRequestResource_
: globalSetPolicyRequestResourceBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.project_ = project_;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.resource_ = resource_;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.compute.v1.SetIamPolicySnapshotRequest) {
return mergeFrom((com.google.cloud.compute.v1.SetIamPolicySnapshotRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.compute.v1.SetIamPolicySnapshotRequest other) {
if (other == com.google.cloud.compute.v1.SetIamPolicySnapshotRequest.getDefaultInstance())
return this;
if (other.hasGlobalSetPolicyRequestResource()) {
mergeGlobalSetPolicyRequestResource(other.getGlobalSetPolicyRequestResource());
}
if (!other.getProject().isEmpty()) {
project_ = other.project_;
bitField0_ |= 0x00000002;
onChanged();
}
if (!other.getResource().isEmpty()) {
resource_ = other.resource_;
bitField0_ |= 0x00000004;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 1566449778:
{
resource_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000004;
break;
} // case 1566449778
case 1820481738:
{
project_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 1820481738
case -1598579310:
{
input.readMessage(
getGlobalSetPolicyRequestResourceFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case -1598579310
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private com.google.cloud.compute.v1.GlobalSetPolicyRequest globalSetPolicyRequestResource_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.compute.v1.GlobalSetPolicyRequest,
com.google.cloud.compute.v1.GlobalSetPolicyRequest.Builder,
com.google.cloud.compute.v1.GlobalSetPolicyRequestOrBuilder>
globalSetPolicyRequestResourceBuilder_;
/**
*
*
* <pre>
* The body resource for this request
* </pre>
*
* <code>
* .google.cloud.compute.v1.GlobalSetPolicyRequest global_set_policy_request_resource = 337048498 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the globalSetPolicyRequestResource field is set.
*/
public boolean hasGlobalSetPolicyRequestResource() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* The body resource for this request
* </pre>
*
* <code>
* .google.cloud.compute.v1.GlobalSetPolicyRequest global_set_policy_request_resource = 337048498 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The globalSetPolicyRequestResource.
*/
public com.google.cloud.compute.v1.GlobalSetPolicyRequest getGlobalSetPolicyRequestResource() {
if (globalSetPolicyRequestResourceBuilder_ == null) {
return globalSetPolicyRequestResource_ == null
? com.google.cloud.compute.v1.GlobalSetPolicyRequest.getDefaultInstance()
: globalSetPolicyRequestResource_;
} else {
return globalSetPolicyRequestResourceBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* The body resource for this request
* </pre>
*
* <code>
* .google.cloud.compute.v1.GlobalSetPolicyRequest global_set_policy_request_resource = 337048498 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setGlobalSetPolicyRequestResource(
com.google.cloud.compute.v1.GlobalSetPolicyRequest value) {
if (globalSetPolicyRequestResourceBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
globalSetPolicyRequestResource_ = value;
} else {
globalSetPolicyRequestResourceBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* The body resource for this request
* </pre>
*
* <code>
* .google.cloud.compute.v1.GlobalSetPolicyRequest global_set_policy_request_resource = 337048498 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setGlobalSetPolicyRequestResource(
com.google.cloud.compute.v1.GlobalSetPolicyRequest.Builder builderForValue) {
if (globalSetPolicyRequestResourceBuilder_ == null) {
globalSetPolicyRequestResource_ = builderForValue.build();
} else {
globalSetPolicyRequestResourceBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* The body resource for this request
* </pre>
*
* <code>
* .google.cloud.compute.v1.GlobalSetPolicyRequest global_set_policy_request_resource = 337048498 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeGlobalSetPolicyRequestResource(
com.google.cloud.compute.v1.GlobalSetPolicyRequest value) {
if (globalSetPolicyRequestResourceBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)
&& globalSetPolicyRequestResource_ != null
&& globalSetPolicyRequestResource_
!= com.google.cloud.compute.v1.GlobalSetPolicyRequest.getDefaultInstance()) {
getGlobalSetPolicyRequestResourceBuilder().mergeFrom(value);
} else {
globalSetPolicyRequestResource_ = value;
}
} else {
globalSetPolicyRequestResourceBuilder_.mergeFrom(value);
}
if (globalSetPolicyRequestResource_ != null) {
bitField0_ |= 0x00000001;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* The body resource for this request
* </pre>
*
* <code>
* .google.cloud.compute.v1.GlobalSetPolicyRequest global_set_policy_request_resource = 337048498 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearGlobalSetPolicyRequestResource() {
bitField0_ = (bitField0_ & ~0x00000001);
globalSetPolicyRequestResource_ = null;
if (globalSetPolicyRequestResourceBuilder_ != null) {
globalSetPolicyRequestResourceBuilder_.dispose();
globalSetPolicyRequestResourceBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* The body resource for this request
* </pre>
*
* <code>
* .google.cloud.compute.v1.GlobalSetPolicyRequest global_set_policy_request_resource = 337048498 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.compute.v1.GlobalSetPolicyRequest.Builder
getGlobalSetPolicyRequestResourceBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getGlobalSetPolicyRequestResourceFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* The body resource for this request
* </pre>
*
* <code>
* .google.cloud.compute.v1.GlobalSetPolicyRequest global_set_policy_request_resource = 337048498 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.compute.v1.GlobalSetPolicyRequestOrBuilder
getGlobalSetPolicyRequestResourceOrBuilder() {
if (globalSetPolicyRequestResourceBuilder_ != null) {
return globalSetPolicyRequestResourceBuilder_.getMessageOrBuilder();
} else {
return globalSetPolicyRequestResource_ == null
? com.google.cloud.compute.v1.GlobalSetPolicyRequest.getDefaultInstance()
: globalSetPolicyRequestResource_;
}
}
/**
*
*
* <pre>
* The body resource for this request
* </pre>
*
* <code>
* .google.cloud.compute.v1.GlobalSetPolicyRequest global_set_policy_request_resource = 337048498 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.compute.v1.GlobalSetPolicyRequest,
com.google.cloud.compute.v1.GlobalSetPolicyRequest.Builder,
com.google.cloud.compute.v1.GlobalSetPolicyRequestOrBuilder>
getGlobalSetPolicyRequestResourceFieldBuilder() {
if (globalSetPolicyRequestResourceBuilder_ == null) {
globalSetPolicyRequestResourceBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.compute.v1.GlobalSetPolicyRequest,
com.google.cloud.compute.v1.GlobalSetPolicyRequest.Builder,
com.google.cloud.compute.v1.GlobalSetPolicyRequestOrBuilder>(
getGlobalSetPolicyRequestResource(), getParentForChildren(), isClean());
globalSetPolicyRequestResource_ = null;
}
return globalSetPolicyRequestResourceBuilder_;
}
private java.lang.Object project_ = "";
/**
*
*
* <pre>
* Project ID for this request.
* </pre>
*
* <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The project.
*/
public java.lang.String getProject() {
java.lang.Object ref = project_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
project_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Project ID for this request.
* </pre>
*
* <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for project.
*/
public com.google.protobuf.ByteString getProjectBytes() {
java.lang.Object ref = project_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
project_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Project ID for this request.
* </pre>
*
* <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The project to set.
* @return This builder for chaining.
*/
public Builder setProject(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
project_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Project ID for this request.
* </pre>
*
* <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return This builder for chaining.
*/
public Builder clearProject() {
project_ = getDefaultInstance().getProject();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* Project ID for this request.
* </pre>
*
* <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The bytes for project to set.
* @return This builder for chaining.
*/
public Builder setProjectBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
project_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private java.lang.Object resource_ = "";
/**
*
*
* <pre>
* Name or id of the resource for this request.
* </pre>
*
* <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The resource.
*/
public java.lang.String getResource() {
java.lang.Object ref = resource_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
resource_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Name or id of the resource for this request.
* </pre>
*
* <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for resource.
*/
public com.google.protobuf.ByteString getResourceBytes() {
java.lang.Object ref = resource_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
resource_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Name or id of the resource for this request.
* </pre>
*
* <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The resource to set.
* @return This builder for chaining.
*/
public Builder setResource(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
resource_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Name or id of the resource for this request.
* </pre>
*
* <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return This builder for chaining.
*/
public Builder clearResource() {
resource_ = getDefaultInstance().getResource();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
*
*
* <pre>
* Name or id of the resource for this request.
* </pre>
*
* <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The bytes for resource to set.
* @return This builder for chaining.
*/
public Builder setResourceBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
resource_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.compute.v1.SetIamPolicySnapshotRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.compute.v1.SetIamPolicySnapshotRequest)
private static final com.google.cloud.compute.v1.SetIamPolicySnapshotRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.compute.v1.SetIamPolicySnapshotRequest();
}
public static com.google.cloud.compute.v1.SetIamPolicySnapshotRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
  // Wire-format parser. On failure it attaches the partially-built message to the
  // thrown exception (setUnfinishedMessage) so callers can inspect what was decoded.
  private static final com.google.protobuf.Parser<SetIamPolicySnapshotRequest> PARSER =
      new com.google.protobuf.AbstractParser<SetIamPolicySnapshotRequest>() {
        @java.lang.Override
        public SetIamPolicySnapshotRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            // Missing required fields: surface as InvalidProtocolBufferException.
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // I/O problems while reading the stream are wrapped, preserving the cause.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<SetIamPolicySnapshotRequest> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<SetIamPolicySnapshotRequest> getParserForType() {
    // All instances share the single static parser.
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.compute.v1.SetIamPolicySnapshotRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/drill | 37,118 | exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/TestParquetComplex.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.drill.exec.store.parquet;
import java.math.BigDecimal;
import org.apache.drill.exec.util.JsonStringArrayList;
import org.apache.drill.test.BaseTestQuery;
import org.apache.drill.test.TestBuilder;
import org.junit.Test;
/**
 * End-to-end query tests for Parquet files containing complex (nested) types:
 * maps (Drill DICT), repeated groups, arrays and decimals. Each test runs a SQL
 * query against a classpath Parquet resource and validates the result either
 * against a JSON baseline file or against inline baseline values built with
 * {@link TestBuilder} helpers.
 */
public class TestParquetComplex extends BaseTestQuery {
  // Primary test file: rows with scalar, map and array columns.
  private static final String DATAFILE = "cp.`store/parquet/complex/complex.parquet`";

  // Sorting rows with complex columns must keep nested values intact.
  @Test
  public void sort() throws Exception {
    String query = String.format("select * from %s order by amount", DATAFILE);
    testBuilder()
      .sqlQuery(query)
      .ordered()
      .jsonBaselineFile("store/parquet/complex/baseline_sorted.json")
      .build()
      .run();
  }

  // Same as sort() but exercising the TopN operator via LIMIT.
  @Test
  public void topN() throws Exception {
    String query = String.format("select * from %s order by amount limit 5", DATAFILE);
    testBuilder()
      .sqlQuery(query)
      .ordered()
      .jsonBaselineFile("store/parquet/complex/baseline_sorted.json")
      .build()
      .run();
  }

  // Self-join on a scalar column; complex columns must survive a hash join.
  @Test
  public void hashJoin() throws Exception{
    String query = String.format("select t1.amount, t1.`date`, t1.marketing_info, t1.`time`, t1.trans_id, t1.trans_info, t1.user_info " +
      "from %s t1, %s t2 where t1.amount = t2.amount", DATAFILE, DATAFILE);
    testBuilder()
      .sqlQuery(query)
      .unOrdered()
      .jsonBaselineFile("store/parquet/complex/baseline.json")
      .build()
      .run();
  }

  // Same join but forcing merge join by disabling hash join for the session.
  @Test
  public void mergeJoin() throws Exception{
    test("alter session set `planner.enable_hashjoin` = false");
    String query = String.format("select t1.amount, t1.`date`, t1.marketing_info, t1.`time`, t1.trans_id, t1.trans_info, t1.user_info " +
      "from %s t1, %s t2 where t1.amount = t2.amount", DATAFILE, DATAFILE);
    testBuilder()
      .sqlQuery(query)
      .unOrdered()
      .jsonBaselineFile("store/parquet/complex/baseline.json")
      .build()
      .run();
  }

  // Explicit column list (no star) covering every column in the file.
  @Test
  public void selectAllColumns() throws Exception {
    String query = String.format("select amount, `date`, marketing_info, `time`, trans_id, trans_info, user_info from %s", DATAFILE);
    testBuilder()
      .sqlQuery(query)
      .ordered()
      .jsonBaselineFile("store/parquet/complex/baseline.json")
      .build()
      .run();
  }

  // Projecting a whole map column.
  @Test
  public void selectMap() throws Exception {
    String query = "select marketing_info from cp.`store/parquet/complex/complex.parquet`";
    testBuilder()
      .sqlQuery(query)
      .ordered()
      .jsonBaselineFile("store/parquet/complex/baseline5.json")
      .build()
      .run();
  }

  // Projecting a map together with a member and an array element of the same map.
  @Test
  public void selectMapAndElements() throws Exception {
    String query = "select marketing_info, t.marketing_info.camp_id as camp_id, t.marketing_info.keywords[2] as keyword2 from cp.`store/parquet/complex/complex.parquet` t";
    testBuilder()
      .sqlQuery(query)
      .ordered()
      .jsonBaselineFile("store/parquet/complex/baseline6.json")
      .build()
      .run();
  }

  // Projecting several members of the same map.
  @Test
  public void selectMultiElements() throws Exception {
    String query = "select t.marketing_info.camp_id as camp_id, t.marketing_info.keywords as keywords from cp.`store/parquet/complex/complex.parquet` t";
    testBuilder()
      .sqlQuery(query)
      .ordered()
      .jsonBaselineFile("store/parquet/complex/baseline7.json")
      .build()
      .run();
  }

  @Test
  public void testStar() throws Exception {
    testBuilder()
      .sqlQuery("select * from cp.`store/parquet/complex/complex.parquet`")
      .ordered()
      .jsonBaselineFile("store/parquet/complex/baseline.json")
      .build()
      .run();
  }

  // Referencing a member that exists in some rows' trans_info map but not all.
  @Test
  public void missingColumnInMap() throws Exception {
    String query = "select t.trans_info.keywords as keywords from cp.`store/parquet/complex/complex.parquet` t";
    String[] columns = {"keywords"};
    testBuilder()
      .sqlQuery(query)
      .ordered()
      .jsonBaselineFile("store/parquet/complex/baseline2.json")
      .baselineColumns(columns)
      .build()
      .run();
  }

  @Test
  public void secondElementInMap() throws Exception {
    String query = String.format("select t.`marketing_info`.keywords as keywords from %s t", DATAFILE);
    String[] columns = {"keywords"};
    testBuilder()
      .sqlQuery(query)
      .ordered()
      .jsonBaselineFile("store/parquet/complex/baseline3.json")
      .baselineColumns(columns)
      .build()
      .run();
  }

  // Indexing into an array nested inside a map.
  @Test
  public void elementsOfArray() throws Exception {
    String query = String.format("select t.`marketing_info`.keywords[0] as keyword0, t.`marketing_info`.keywords[2] as keyword2 from %s t", DATAFILE);
    String[] columns = {"keyword0", "keyword2"};
    testBuilder()
      .sqlQuery(query)
      .unOrdered()
      .jsonBaselineFile("store/parquet/complex/baseline4.json")
      .baselineColumns(columns)
      .build()
      .run();
  }

  // Same as elementsOfArray but with mixed-case identifiers; column resolution
  // must be case-insensitive.
  @Test
  public void elementsOfArrayCaseInsensitive() throws Exception {
    String query = String.format("select t.`MARKETING_INFO`.keywords[0] as keyword0, t.`Marketing_Info`.Keywords[2] as keyword2 from %s t", DATAFILE);
    String[] columns = {"keyword0", "keyword2"};
    testBuilder()
      .sqlQuery(query)
      .unOrdered()
      .jsonBaselineFile("store/parquet/complex/baseline4.json")
      .baselineColumns(columns)
      .build()
      .run();
  }

  // Referencing non-existent map members must yield nulls, not errors.
  // NOTE(review): method name has a typo ("notxists" -> "notExists"); kept
  // as-is since JUnit method names are part of reported test identity.
  @Test //DRILL-3533
  public void notxistsField() throws Exception {
    String query = String.format("select t.`marketing_info`.notexists as notexists1,\n" +
      "t.`marketing_info`.camp_id as id,\n" +
      "t.`marketing_info.camp_id` as notexists2\n" +
      "from %s t", DATAFILE);
    String[] columns = {"notexists1", "id", "notexists2"};
    testBuilder()
      .sqlQuery(query)
      .unOrdered()
      .jsonBaselineFile("store/parquet/complex/baseline8.json")
      .baselineColumns(columns)
      .build()
      .run();
  }

  // Repeated DECIMAL columns backed by int32, int64, fixed-length and binary
  // physical types must all decode to the same BigDecimal values.
  @Test
  public void testReadRepeatedDecimals() throws Exception {
    JsonStringArrayList<BigDecimal> ints = new JsonStringArrayList<>();
    ints.add(new BigDecimal("999999.999"));
    ints.add(new BigDecimal("-999999.999"));
    ints.add(new BigDecimal("0.000"));

    JsonStringArrayList<BigDecimal> longs = new JsonStringArrayList<>();
    longs.add(new BigDecimal("999999999.999999999"));
    longs.add(new BigDecimal("-999999999.999999999"));
    longs.add(new BigDecimal("0.000000000"));

    JsonStringArrayList<BigDecimal> fixedLen = new JsonStringArrayList<>();
    fixedLen.add(new BigDecimal("999999999999.999999"));
    fixedLen.add(new BigDecimal("-999999999999.999999"));
    fixedLen.add(new BigDecimal("0.000000"));

    testBuilder()
      .sqlQuery("select * from cp.`parquet/repeatedIntLondFixedLenBinaryDecimal.parquet`")
      .unOrdered()
      .baselineColumns("decimal_int32", "decimal_int64", "decimal_fixedLen", "decimal_binary")
      .baselineValues(ints, longs, fixedLen, fixedLen)
      .go();
  }

  // --- DICT (Parquet MAP) tests against simple_map.parquet / 000000_0.parquet ---

  @Test
  public void selectDictBigIntValue() throws Exception {
    String query = "select order_items from cp.`store/parquet/complex/simple_map.parquet`";
    testBuilder()
      .sqlQuery(query)
      .unOrdered()
      .baselineColumns("order_items")
      .baselineValues(TestBuilder.mapOfObject("Pencils", 1L))
      .go();
  }

  // DICT whose values are structs (maps with named members).
  @Test
  public void selectDictStructValue() throws Exception {
    String query = "select id, mapcol4 from cp.`store/parquet/complex/map/parquet/000000_0.parquet`";
    testBuilder()
      .sqlQuery(query)
      .unOrdered()
      .baselineColumns("id", "mapcol4")
      .baselineValues(1,
          TestBuilder.mapOfObject(
              101L,
              TestBuilder.mapOfObject(false, "item_amount", 1L, "item_type", "pencil"),
              102L,
              TestBuilder.mapOfObject(false, "item_amount", 2L, "item_type", "eraser")
          )
      )
      .baselineValues(2,
          TestBuilder.mapOfObject(
              102L,
              TestBuilder.mapOfObject(false, "item_amount", 3L, "item_type", "pen"),
              103L,
              TestBuilder.mapOfObject(false, "item_amount", 4L, "item_type", "scissors")
          )
      )
      .baselineValues(3,
          TestBuilder.mapOfObject(
              110L,
              TestBuilder.mapOfObject(false, "item_amount", 5L, "item_type", "glue"),
              113L,
              TestBuilder.mapOfObject(false, "item_amount", 6L, "item_type", "pencil")
          )
      )
      .baselineValues(4,
          TestBuilder.mapOfObject(
              238L,
              TestBuilder.mapOfObject(false, "item_amount", 7L, "item_type", "pen"),
              239L,
              TestBuilder.mapOfObject(false, "item_amount", 8L, "item_type", "eraser"),
              240L,
              TestBuilder.mapOfObject(false, "item_amount", 9L, "item_type", "scissors"),
              241L,
              TestBuilder.mapOfObject(false, "item_amount", 10L, "item_type", "glue")
          )
      )
      .baselineValues(5,
          TestBuilder.mapOfObject(
              242L,
              TestBuilder.mapOfObject(false, "item_amount", 11L, "item_type", "paper"),
              243L,
              TestBuilder.mapOfObject(false, "item_amount", 13L, "item_type", "ink")
          )
      )
      .go();
  }

  // DICT whose values are int arrays.
  @Test
  public void selectDictIntArrayValue() throws Exception {
    String query = "select id, mapcol5 from cp.`store/parquet/complex/map/parquet/000000_0.parquet` order by id asc";
    testBuilder()
      .sqlQuery(query)
      .ordered()
      .baselineColumns("id", "mapcol5")
      .baselineValues(
          1, TestBuilder.mapOfObject(
              3, TestBuilder.listOf(3, 4, 5),
              5, TestBuilder.listOf(5, 3)
          )
      )
      .baselineValues(
          2, TestBuilder.mapOfObject(
              1, TestBuilder.listOf(1, 2, 3, 4, 5)
          )
      )
      .baselineValues(
          3, TestBuilder.mapOfObject(
              1, TestBuilder.listOf(1, 2, 3, 4, 5),
              2, TestBuilder.listOf(2, 3)
          )
      )
      .baselineValues(
          4, TestBuilder.mapOfObject(
              1, TestBuilder.listOf(3, 4, 5, 10, -2, -4),
              5, TestBuilder.listOf(), // this actually contains a null element
              -2, TestBuilder.listOf(2, 2, 2, 2),
              8, TestBuilder.listOf(2, 2, 3, 4)
          )
      )
      .baselineValues(
          5, TestBuilder.mapOfObject(
              2, TestBuilder.listOf(5),
              3, TestBuilder.listOf(8, -5, 3, 4)
          )
      )
      .go();
  }

  // Key lookup into a DICT with array values; missing key yields an empty list.
  @Test
  public void selectDictIntArrayValueGetByKey() throws Exception {
    String query = "select id, mapcol5[1] as val from cp.`store/parquet/complex/map/parquet/000000_0.parquet` order by id asc";
    testBuilder()
      .sqlQuery(query)
      .ordered()
      .baselineColumns("id", "val")
      .baselineValues(1, TestBuilder.listOf())
      .baselineValues(2, TestBuilder.listOf(1, 2, 3, 4, 5))
      .baselineValues(3, TestBuilder.listOf(1, 2, 3, 4, 5))
      .baselineValues(4, TestBuilder.listOf(3, 4, 5, 10, -2, -4))
      .baselineValues(5, TestBuilder.listOf())
      .go();
  }

  // DICT whose values are themselves DICTs.
  @Test
  public void selectDictDictValue() throws Exception {
    String query = "select id, mapcol3 from cp.`store/parquet/complex/map/parquet/000000_0.parquet` order by id asc";
    testBuilder()
      .sqlQuery(query)
      .ordered()
      .baselineColumns("id", "mapcol3")
      .baselineValues(1, TestBuilder.mapOfObject(
          3, TestBuilder.mapOfObject("a", 1L, "b", 2L),
          4, TestBuilder.mapOfObject("c", 3L),
          5, TestBuilder.mapOfObject("d", 4L, "e", 5L)
          )
      )
      .baselineValues(2, TestBuilder.mapOfObject(
          1, TestBuilder.mapOfObject("a", 1L, "b", 2L)
          )
      )
      .baselineValues(3, TestBuilder.mapOfObject(
          2, TestBuilder.mapOfObject("a", 1L, "b", 2L),
          3, TestBuilder.mapOfObject("C", 3L)
          )
      )
      .baselineValues(4, TestBuilder.mapOfObject(
          2, TestBuilder.mapOfObject("abc", 1L, "bce", 2L),
          4, TestBuilder.mapOfObject("c", 3L, "cf", 6L),
          5, TestBuilder.mapOfObject("d", 4L, "eh", 5L),
          8, TestBuilder.mapOfObject("d", 32L, "e", -17L)
          )
      )
      .baselineValues(5, TestBuilder.mapOfObject(
          1, TestBuilder.mapOfObject("bee", -2L, "awg", 1L),
          2, TestBuilder.mapOfObject("cddd", 3L),
          4, TestBuilder.mapOfObject("deea", 4L, "eerie", 99L)
          )
      )
      .go();
  }

  // Key lookup where the value is complex; missing key yields an empty map.
  @Test
  public void selectDictGetByIntKeyComplexValue() throws Exception {
    String query = "select id, mapcol3[3] as val from cp.`store/parquet/complex/map/parquet/000000_0.parquet`";
    testBuilder()
      .sqlQuery(query)
      .unOrdered()
      .baselineColumns("id", "val")
      .baselineValues(4, TestBuilder.mapOfObject())
      .baselineValues(1, TestBuilder.mapOfObject("a", 1L, "b", 2L))
      .baselineValues(3, TestBuilder.mapOfObject("C", 3L))
      .baselineValues(2, TestBuilder.mapOfObject())
      .baselineValues(5, TestBuilder.mapOfObject())
      .go();
  }

  @Test
  public void selectDictGetByStringKey() throws Exception {
    String query = "select mapcol['a'] val from cp.`store/parquet/complex/map/parquet/000000_0.parquet` order by id asc";
    testBuilder()
      .sqlQuery(query)
      .ordered()
      .baselineColumns("val")
      .baselineValuesForSingleColumn(null, 1, null, 3, 8)
      .go();
  }

  @Test
  public void selectDictGetByStringKey2() throws Exception {
    String query = "select id, mapcol['b'] val from cp.`store/parquet/complex/map/parquet/000000_0.parquet`";
    testBuilder()
      .sqlQuery(query)
      .unOrdered()
      .baselineColumns("id", "val")
      .baselineValues(4, 4)
      .baselineValues(1, 6)
      .baselineValues(3, null)
      .baselineValues(2, 2)
      .baselineValues(5, 6)
      .go();
  }

  // Chained key lookups through two DICT levels.
  @Test
  public void selectDictByKeyComplexValue2() throws Exception {
    String query = "select id, mapcol3[4]['c'] val from cp.`store/parquet/complex/map/parquet/000000_0.parquet`";
    testBuilder()
      .sqlQuery(query)
      .unOrdered()
      .baselineColumns("id", "val")
      .baselineValues(4, 3L)
      .baselineValues(1, 3L)
      .baselineValues(3, null)
      .baselineValues(2, null)
      .baselineValues(5, null)
      .go();
  }

  @Test
  public void selectDictGetByKeyComplexValue3() throws Exception {
    String query = "select id, mapcol3[3]['b'] val from cp.`store/parquet/complex/map/parquet/000000_0.parquet`";
    testBuilder()
      .sqlQuery(query)
      .unOrdered()
      .baselineColumns("id", "val")
      .baselineValues(4, null)
      .baselineValues(1, 2L)
      .baselineValues(3, null)
      .baselineValues(2, null)
      .baselineValues(5, null)
      .go();
  }

  // ORDER BY on a scalar column while projecting a DICT.
  @Test
  public void testDictOrderByAnotherField() throws Exception {
    String query = "select id, mapcol from cp.`store/parquet/complex/map/parquet/000000_0.parquet` order by id desc";
    testBuilder()
      .sqlQuery(query)
      .ordered()
      .baselineColumns("id", "mapcol")
      .baselineValues(5, TestBuilder.mapOfObject("b", 6, "c", 7, "a", 8, "abc4", 9, "bde", 10))
      .baselineValues(4, TestBuilder.mapOfObject("a", 3, "b", 4, "c", 5))
      .baselineValues(3, TestBuilder.mapOfObject("b", null, "c", 8, "d", 9, "e", 10))
      .baselineValues(2, TestBuilder.mapOfObject("a", 1, "b", 2, "c", 3))
      .baselineValues(1, TestBuilder.mapOfObject("b", 6, "c", 7))
      .go();
  }

  @Test
  public void testDictWithLimitAndOffset() throws Exception {
    String query = "select id, mapcol from cp.`store/parquet/complex/map/parquet/000000_0.parquet` order by id desc limit 2 offset 2";
    testBuilder()
      .sqlQuery(query)
      .ordered()
      .baselineColumns("id", "mapcol")
      .baselineValues(3, TestBuilder.mapOfObject("b", null, "c", 8, "d", 9, "e", 10))
      .baselineValues(2, TestBuilder.mapOfObject("a", 1, "b", 2, "c", 3))
      .go();
  }

  // Repeated DICT column: each row holds an array of maps.
  @Test
  public void testDictDictArrayValue() throws Exception {
    String query = "select id, map_array from cp.`store/parquet/complex/map/parquet/000000_0.parquet`";
    testBuilder()
      .sqlQuery(query)
      .unOrdered()
      .baselineColumns("id", "map_array")
      .baselineValues(
          4,
          TestBuilder.listOf(
              TestBuilder.mapOfObject(1L, 2, 10L, 1, 42L, 3, 31L, 4),
              TestBuilder.mapOfObject(-1L, 2, 3L, 1, 5L, 3, 54L, 4, 55L, 589, -78L, 2),
              TestBuilder.mapOfObject(1L, 124, 3L, 1, -4L, 2, 19L, 3, 5L, 3, 9L, 1),
              TestBuilder.mapOfObject(1L, 89, 2L, 1, 3L, 3, 4L, 21, 5L, 12, 6L, 34),
              TestBuilder.mapOfObject(1L, -25, 3L, 1, 5L, 3, 6L, 2, 9L, 333, 10L, 344),
              TestBuilder.mapOfObject(3L, 222, 4L, 1, 5L, 3, 6L, 2, 7L, 1, 8L, 3),
              TestBuilder.mapOfObject(1L, 11, 3L, 12, 5L, 13)
          )
      )
      .baselineValues(
          1,
          TestBuilder.listOf(
              TestBuilder.mapOfObject(8L, 1, 9L, 2, 523L, 4, 31L, 3),
              TestBuilder.mapOfObject(1L, 2, 3L, 1, 5L, 3)
          )
      )
      .baselineValues(
          3,
          TestBuilder.listOf(
              TestBuilder.mapOfObject(3L, 1),
              TestBuilder.mapOfObject(1L, 2)
          )
      )
      .baselineValues(
          2,
          TestBuilder.listOf(
              TestBuilder.mapOfObject(1L, 1, 2L, 2)
          )
      )
      .baselineValues(
          5,
          TestBuilder.listOf(
              TestBuilder.mapOfObject(1L, 1, 2L, 2, 3L, 3, 4L, 4),
              TestBuilder.mapOfObject(1L, -1, 2L, -2),
              TestBuilder.mapOfObject(1L, 4, 2L, 5, 3L, 7)
          )
      )
      .go();
  }

  @Test
  public void testDictArrayGetElementByIndex() throws Exception {
    String query = "select id, map_array[0] as element from cp.`store/parquet/complex/map/parquet/000000_0.parquet`";
    testBuilder()
      .sqlQuery(query)
      .unOrdered()
      .baselineColumns("id", "element")
      .baselineValues(4, TestBuilder.mapOfObject(1L, 2, 10L, 1, 42L, 3, 31L, 4))
      .baselineValues(1, TestBuilder.mapOfObject(8L, 1, 9L, 2, 523L, 4, 31L, 3))
      .baselineValues(3, TestBuilder.mapOfObject(3L, 1))
      .baselineValues(2, TestBuilder.mapOfObject(1L, 1, 2L, 2))
      .baselineValues(5, TestBuilder.mapOfObject(1L, 1, 2L, 2, 3L, 3, 4L, 4))
      .go();
  }

  // BIGINT-keyed DICT lookup; missing key yields an empty map.
  @Test
  public void testDictGetByLongKey() throws Exception {
    String query = "select id, mapcol4[102] as val from cp.`store/parquet/complex/map/parquet/000000_0.parquet`";
    testBuilder()
      .sqlQuery(query)
      .unOrdered()
      .baselineColumns("id", "val")
      .baselineValues(1, TestBuilder.mapOfObject(false, "item_amount", 2L, "item_type", "eraser"))
      .baselineValues(2, TestBuilder.mapOfObject(false, "item_amount", 3L, "item_type", "pen"))
      .baselineValues(3, TestBuilder.mapOfObject())
      .baselineValues(4, TestBuilder.mapOfObject())
      .baselineValues(5, TestBuilder.mapOfObject())
      .go();
  }

  // FLOAT-keyed DICT with FLOAT values.
  @Test
  public void testSelectDictFloatToFloat() throws Exception {
    String query = "select id, mapcol2 as val from cp.`store/parquet/complex/map/parquet/000000_0.parquet`";
    testBuilder()
      .sqlQuery(query)
      .unOrdered()
      .baselineColumns("id", "val")
      .baselineValues(4, TestBuilder.mapOfObject(-9.01f, 2.0f, 0.43f, 4.3f))
      .baselineValues(1, TestBuilder.mapOfObject(1.1f, -1.0f, 2.3f, 2.1f, 3.45f, 3.5f, 4.47f, 4.43f))
      .baselineValues(3, TestBuilder.mapOfObject(7.9f, 0.43f, 3.1f, 21.1f, 1.1f, 3.53f))
      .baselineValues(2, TestBuilder.mapOfObject(0.9f, 0.43f, 1.1f, 2.1f, 2.0f, 3.3f))
      .baselineValues(5, TestBuilder.mapOfObject(1.1f, 255.34f, -2.0f, 24.0f, 45.53f, 78.22f))
      .go();
  }

  // Float key supplied as a string literal in the index expression.
  @Test
  public void testSelectDictGetByFloatKey() throws Exception {
    String query = "select id, mapcol2['1.1'] as val from cp.`store/parquet/complex/map/parquet/000000_0.parquet`";
    testBuilder()
      .sqlQuery(query)
      .unOrdered()
      .baselineColumns("id", "val")
      .baselineValues(4, null)
      .baselineValues(1, -1.0f)
      .baselineValues(3, 3.53f)
      .baselineValues(2, 2.1f)
      .baselineValues(5, 255.34f)
      .go();
  }

  @Test
  public void testSelectDictGetByNegativeFloatKey() throws Exception {
    String query = "select id, mapcol2['-9.01'] as val from cp.`store/parquet/complex/map/parquet/000000_0.parquet`";
    testBuilder()
      .sqlQuery(query)
      .unOrdered()
      .baselineColumns("id", "val")
      .baselineValues(4, 2.0f)
      .baselineValues(1, null)
      .baselineValues(3, null)
      .baselineValues(2, null)
      .baselineValues(5, null)
      .go();
  }

  // ORDER BY on a value looked up inside the DICT itself.
  @Test
  public void testDictOrderByValue() throws Exception {
    String query = "select id, mapcol as val from cp.`store/parquet/complex/map/parquet/000000_0.parquet` order by mapcol['a'] desc";
    testBuilder()
      .sqlQuery(query)
      .ordered()
      .baselineColumns("id", "val")
      .baselineValues(1, TestBuilder.mapOfObject("b", 6, "c", 7))
      .baselineValues(3, TestBuilder.mapOfObject("b", null, "c", 8, "d", 9, "e", 10))
      .baselineValues(5, TestBuilder.mapOfObject("b", 6, "c", 7, "a", 8, "abc4", 9, "bde", 10))
      .baselineValues(4, TestBuilder.mapOfObject("a", 3, "b", 4, "c", 5))
      .baselineValues(2, TestBuilder.mapOfObject("a", 1, "b", 2, "c", 3))
      .go();
  }

  // Array index followed by a DICT key lookup, also used as sort key.
  @Test
  public void testDictArrayElementGetByKey() throws Exception {
    String query = "select map_array[1][5] as val from cp.`store/parquet/complex/map/parquet/000000_0.parquet` order by map_array[1][5] desc";
    testBuilder()
      .sqlQuery(query)
      .ordered()
      .baselineColumns("val")
      .baselineValuesForSingleColumn(null, null, null, 3, 3)
      .go();
  }

  @Test
  public void testDictArrayElementGetByStringKey() throws Exception {
    String query = "select map_array[1]['1'] as val from cp.`store/parquet/complex/map/parquet/000000_0.parquet`";
    testBuilder()
      .sqlQuery(query)
      .unOrdered()
      .baselineColumns("val")
      .baselineValuesForSingleColumn(null, 2, 2, null, -1)
      .go();
  }

  // typeof() reporting for repeated DICT and a single DICT element.
  @Test
  public void testDictArrayTypeOf() throws Exception {
    String query = "select typeof(map_array) as type from cp.`store/parquet/complex/map/parquet/000000_0.parquet` limit 1";
    testBuilder()
      .sqlQuery(query)
      .ordered()
      .baselineColumns("type")
      .baselineValuesForSingleColumn("ARRAY<DICT<BIGINT,INT>>")
      .go();
  }

  @Test
  public void testDictTypeOf() throws Exception {
    String query = "select typeof(map_array[0]) as type from cp.`store/parquet/complex/map/parquet/000000_0.parquet` limit 1";
    testBuilder()
      .sqlQuery(query)
      .ordered()
      .baselineColumns("type")
      .baselineValuesForSingleColumn("DICT<BIGINT,INT>")
      .go();
  }

  // FLATTEN over a DICT produces one (key, value) map per entry.
  @Test
  public void testDictFlatten() throws Exception {
    String query = "select id, flatten(mapcol) as flat from cp.`store/parquet/complex/map/parquet/000000_0.parquet` order by id";
    testBuilder()
      .sqlQuery(query)
      .unOrdered()
      .baselineColumns("id", "flat")
      .baselineValues(1, TestBuilder.mapOfObject(false, "key", "b", "value", 6))
      .baselineValues(1, TestBuilder.mapOfObject(false, "key", "c", "value", 7))
      .baselineValues(3, TestBuilder.mapOfObject(false, "key", "b")) // "value" == null
      .baselineValues(3, TestBuilder.mapOfObject(false, "key", "c", "value", 8))
      .baselineValues(3, TestBuilder.mapOfObject(false, "key", "d", "value", 9))
      .baselineValues(3, TestBuilder.mapOfObject(false, "key", "e", "value", 10))
      .baselineValues(5, TestBuilder.mapOfObject(false, "key", "b", "value", 6))
      .baselineValues(5, TestBuilder.mapOfObject(false, "key", "c", "value", 7))
      .baselineValues(5, TestBuilder.mapOfObject(false, "key", "a", "value", 8))
      .baselineValues(5, TestBuilder.mapOfObject(false, "key", "abc4", "value", 9))
      .baselineValues(5, TestBuilder.mapOfObject(false, "key", "bde", "value", 10))
      .baselineValues(4, TestBuilder.mapOfObject(false, "key", "a", "value", 3))
      .baselineValues(4, TestBuilder.mapOfObject(false, "key", "b", "value", 4))
      .baselineValues(4, TestBuilder.mapOfObject(false, "key", "c", "value", 5))
      .baselineValues(2, TestBuilder.mapOfObject(false, "key", "a", "value", 1))
      .baselineValues(2, TestBuilder.mapOfObject(false, "key", "b", "value", 2))
      .baselineValues(2, TestBuilder.mapOfObject(false, "key", "c", "value", 3))
      .go();
  }

  // FLATTEN over a repeated DICT produces one map per array element.
  @Test
  public void testDictArrayFlatten() throws Exception {
    String query = "select id, flatten(map_array) flat from cp.`store/parquet/complex/map/parquet/000000_0.parquet`";
    testBuilder()
      .sqlQuery(query)
      .unOrdered()
      .baselineColumns("id", "flat")
      .baselineValues(4, TestBuilder.mapOfObject(1L, 2, 10L, 1, 42L, 3, 31L, 4))
      .baselineValues(4, TestBuilder.mapOfObject(-1L, 2, 3L, 1, 5L, 3, 54L, 4, 55L, 589, -78L, 2))
      .baselineValues(4, TestBuilder.mapOfObject(1L, 124, 3L, 1, -4L, 2, 19L, 3, 5L, 3, 9L, 1))
      .baselineValues(4, TestBuilder.mapOfObject(1L, 89, 2L, 1, 3L, 3, 4L, 21, 5L, 12, 6L, 34))
      .baselineValues(4, TestBuilder.mapOfObject(1L, -25, 3L, 1, 5L, 3, 6L, 2, 9L, 333, 10L, 344))
      .baselineValues(4, TestBuilder.mapOfObject(3L, 222, 4L, 1, 5L, 3, 6L, 2, 7L, 1, 8L, 3))
      .baselineValues(4, TestBuilder.mapOfObject(1L, 11, 3L, 12, 5L, 13))
      .baselineValues(1, TestBuilder.mapOfObject(8L, 1, 9L, 2, 523L, 4, 31L, 3))
      .baselineValues(1, TestBuilder.mapOfObject(1L, 2, 3L, 1, 5L, 3))
      .baselineValues(3, TestBuilder.mapOfObject(3L, 1))
      .baselineValues(3, TestBuilder.mapOfObject(1L, 2))
      .baselineValues(2, TestBuilder.mapOfObject(1L, 1, 2L, 2))
      .baselineValues(5, TestBuilder.mapOfObject(1L, 1, 2L, 2, 3L, 3, 4L, 4))
      .baselineValues(5, TestBuilder.mapOfObject(1L, -1, 2L, -2))
      .baselineValues(5, TestBuilder.mapOfObject(1L, 4, 2L, 5, 3L, 7))
      .go();
  }

  // Double FLATTEN: first over the array, then over each DICT's entries.
  @Test
  public void testDictArrayAndElementFlatten() throws Exception {
    String query = "select id, flatten(flatten(map_array)) flat from cp.`store/parquet/complex/map/parquet/000000_0.parquet`";
    testBuilder()
      .sqlQuery(query)
      .unOrdered()
      .baselineColumns("id", "flat")
      .baselineValues(4, TestBuilder.mapOf("key", 1L, "value", 2))
      .baselineValues(4, TestBuilder.mapOf("key", 10L, "value", 1))
      .baselineValues(4, TestBuilder.mapOf("key", 42L, "value", 3))
      .baselineValues(4, TestBuilder.mapOf("key", 31L, "value", 4))
      .baselineValues(4, TestBuilder.mapOf("key", -1L, "value", 2))
      .baselineValues(4, TestBuilder.mapOf("key", 3L, "value", 1))
      .baselineValues(4, TestBuilder.mapOf("key", 5L, "value", 3))
      .baselineValues(4, TestBuilder.mapOf("key", 54L, "value", 4))
      .baselineValues(4, TestBuilder.mapOf("key", 55L, "value", 589))
      .baselineValues(4, TestBuilder.mapOf("key", -78L, "value", 2))
      .baselineValues(4, TestBuilder.mapOf("key", 1L, "value", 124))
      .baselineValues(4, TestBuilder.mapOf("key", 3L, "value", 1))
      .baselineValues(4, TestBuilder.mapOf("key", -4L, "value", 2))
      .baselineValues(4, TestBuilder.mapOf("key", 19L, "value", 3))
      .baselineValues(4, TestBuilder.mapOf("key", 5L, "value", 3))
      .baselineValues(4, TestBuilder.mapOf("key", 9L, "value", 1))
      .baselineValues(4, TestBuilder.mapOf("key", 1L, "value", 89))
      .baselineValues(4, TestBuilder.mapOf("key", 2L, "value", 1))
      .baselineValues(4, TestBuilder.mapOf("key", 3L, "value", 3))
      .baselineValues(4, TestBuilder.mapOf("key", 4L, "value", 21))
      .baselineValues(4, TestBuilder.mapOf("key", 5L, "value", 12))
      .baselineValues(4, TestBuilder.mapOf("key", 6L, "value", 34))
      .baselineValues(4, TestBuilder.mapOf("key", 1L, "value", -25))
      .baselineValues(4, TestBuilder.mapOf("key", 3L, "value", 1))
      .baselineValues(4, TestBuilder.mapOf("key", 5L, "value", 3))
      .baselineValues(4, TestBuilder.mapOf("key", 6L, "value", 2))
      .baselineValues(4, TestBuilder.mapOf("key", 9L, "value", 333))
      .baselineValues(4, TestBuilder.mapOf("key", 10L, "value", 344))
      .baselineValues(4, TestBuilder.mapOf("key", 3L, "value", 222))
      .baselineValues(4, TestBuilder.mapOf("key", 4L, "value", 1))
      .baselineValues(4, TestBuilder.mapOf("key", 5L, "value", 3))
      .baselineValues(4, TestBuilder.mapOf("key", 6L, "value", 2))
      .baselineValues(4, TestBuilder.mapOf("key", 7L, "value", 1))
      .baselineValues(4, TestBuilder.mapOf("key", 8L, "value", 3))
      .baselineValues(4, TestBuilder.mapOf("key", 1L, "value", 11))
      .baselineValues(4, TestBuilder.mapOf("key", 3L, "value", 12))
      .baselineValues(4, TestBuilder.mapOf("key", 5L, "value", 13))
      .baselineValues(1, TestBuilder.mapOf("key", 8L, "value", 1))
      .baselineValues(1, TestBuilder.mapOf("key", 9L, "value", 2))
      .baselineValues(1, TestBuilder.mapOf("key", 523L, "value", 4))
      .baselineValues(1, TestBuilder.mapOf("key", 31L, "value", 3))
      .baselineValues(1, TestBuilder.mapOf("key", 1L, "value", 2))
      .baselineValues(1, TestBuilder.mapOf("key", 3L, "value", 1))
      .baselineValues(1, TestBuilder.mapOf("key", 5L, "value", 3))
      .baselineValues(3, TestBuilder.mapOf("key", 3L, "value", 1))
      .baselineValues(3, TestBuilder.mapOf("key", 1L, "value", 2))
      .baselineValues(2, TestBuilder.mapOf("key", 1L, "value", 1))
      .baselineValues(2, TestBuilder.mapOf("key", 2L, "value", 2))
      .baselineValues(5, TestBuilder.mapOf("key", 1L, "value", 1))
      .baselineValues(5, TestBuilder.mapOf("key", 2L, "value", 2))
      .baselineValues(5, TestBuilder.mapOf("key", 3L, "value", 3))
      .baselineValues(5, TestBuilder.mapOf("key", 4L, "value", 4))
      .baselineValues(5, TestBuilder.mapOf("key", 1L, "value", -1))
      .baselineValues(5, TestBuilder.mapOf("key", 2L, "value", -2))
      .baselineValues(5, TestBuilder.mapOf("key", 1L, "value", 4))
      .baselineValues(5, TestBuilder.mapOf("key", 2L, "value", 5))
      .baselineValues(5, TestBuilder.mapOf("key", 3L, "value", 7))
      .go();
  }

  // FLATTEN applied to the array value obtained by a DICT key lookup.
  // Rows whose lookup yields an empty list produce no output rows.
  @Test
  public void selectDictFlattenListValue() throws Exception {
    String query = "select id, flatten(mapcol5[1]) as flat from cp.`store/parquet/complex/map/parquet/000000_0.parquet`";
    testBuilder()
      .sqlQuery(query)
      .unOrdered()
      .baselineColumns("id", "flat")
      .baselineValues(2, 1)
      .baselineValues(2, 2)
      .baselineValues(2, 3)
      .baselineValues(2, 4)
      .baselineValues(2, 5)
      .baselineValues(3, 1)
      .baselineValues(3, 2)
      .baselineValues(3, 3)
      .baselineValues(3, 4)
      .baselineValues(3, 5)
      .baselineValues(4, 3)
      .baselineValues(4, 4)
      .baselineValues(4, 5)
      .baselineValues(4, 10)
      .baselineValues(4, -2)
      .baselineValues(4, -4)
      .go();
  }

  // DICT value lookups used in WHERE predicates (>, IS NULL, IS NOT NULL).
  @Test
  public void testDictValueInFilter() throws Exception {
    String query = "select id, mapcol from cp.`store/parquet/complex/map/parquet/000000_0.parquet` where mapcol['c'] > 5";
    testBuilder()
      .sqlQuery(query)
      .unOrdered()
      .baselineColumns("id", "mapcol")
      .baselineValues(1, TestBuilder.mapOfObject("b", 6, "c", 7))
      .baselineValues(3, TestBuilder.mapOfObject("b", null, "c", 8, "d", 9, "e", 10))
      .baselineValues(5, TestBuilder.mapOfObject("b", 6, "c", 7, "a", 8, "abc4", 9, "bde", 10))
      .go();
  }

  @Test
  public void testDictValueInFilter2() throws Exception {
    String query = "select id, mapcol from cp.`store/parquet/complex/map/parquet/000000_0.parquet` where mapcol['a'] is null";
    testBuilder()
      .sqlQuery(query)
      .unOrdered()
      .baselineColumns("id", "mapcol")
      .baselineValues(1, TestBuilder.mapOfObject("b", 6, "c", 7))
      .baselineValues(3, TestBuilder.mapOfObject("b", null, "c", 8, "d", 9, "e", 10))
      .go();
  }

  @Test
  public void testDictValueInFilter3() throws Exception {
    String query = "select id, mapcol from cp.`store/parquet/complex/map/parquet/000000_0.parquet` where mapcol['b'] is not null";
    testBuilder()
      .sqlQuery(query)
      .unOrdered()
      .baselineColumns("id", "mapcol")
      .baselineValues(1, TestBuilder.mapOfObject("b", 6, "c", 7))
      .baselineValues(5, TestBuilder.mapOfObject("b", 6, "c", 7, "a", 8, "abc4", 9, "bde", 10))
      .baselineValues(4, TestBuilder.mapOfObject("a", 3, "b", 4, "c", 5))
      .baselineValues(2, TestBuilder.mapOfObject("a", 1, "b", 2, "c", 3))
      .go();
  }

  // DICT nested inside a repeated struct.
  @Test // DRILL-7473
  public void testDictInRepeatedMap() throws Exception {
    String query = "select struct_array[1].d as d from cp.`store/parquet/complex/map/parquet/repeated_struct_with_dict.parquet`";
    testBuilder()
      .sqlQuery(query)
      .unOrdered()
      .baselineColumns("d")
      .baselineValuesForSingleColumn(
          TestBuilder.mapOfObject(1, "a", 2, "b", 3, "c"),
          TestBuilder.mapOfObject(),
          TestBuilder.mapOfObject(1, "a", 2, "b")
      )
      .go();
  }

  // COUNT over complex-typed columns (c14 is expected to count 0).
  @Test // DRILL-7491
  public void testCountOnComplexTypes() throws Exception {
    String query = "SELECT " +
        "COUNT(c13) cnt13, COUNT(c14) cnt14, " +
        "COUNT(c15) cnt15, COUNT(c16) cnt16 " +
        "FROM cp.`parquet/hive_all/hive_alltypes.parquet`";
    testBuilder()
      .sqlQuery(query)
      .unOrdered()
      .baselineColumns("cnt13", "cnt14", "cnt15", "cnt16")
      .baselineValues(3L, 0L, 3L, 3L)
      .go();
  }

  // Filtering on a member of a repeated struct element.
  @Test // DRILL-7509
  public void selectRepeatedMapWithFilter() throws Exception {
    String query = "select id, struct_array[1].b as b from cp.`store/parquet/complex/repeated_struct.parquet` where struct_array[1].b is null";
    testBuilder()
      .sqlQuery(query)
      .unOrdered()
      .baselineColumns("id", "b")
      .baselineValues(2, null)
      .go();
  }

  // UUID logical type read as raw 16-byte values by the complex reader
  // (store.parquet.use_new_reader is explicitly disabled for this query).
  @Test
  public void testNewComplexParquetReaderUUID() throws Exception {
    String query = "select `uuid_req1`, `uuid_opt1`, `uuid_req2` from cp.`store/parquet/complex/uuid.parquet` order by `uuid_req1`, `uuid_opt1`, `uuid_req2` limit 1";
    byte[] firstValue = {0, 39, -125, -76, -113, 95, 73, -68, -68, 61, -89, -24, 123, -40, 94, -6};
    byte[] secondValue = {74, -38, 0, -43, -73, 101, 67, -11, -68, -17, -63, 111, -20, 70, -93, -76};
    testBuilder()
      .optionSettingQueriesForTestQuery("alter session set `store.parquet.use_new_reader` = false")
      .sqlQuery(query)
      .unOrdered()
      .baselineColumns("uuid_req1", "uuid_opt1", "uuid_req2")
      .baselineValues(firstValue, null, secondValue)
      .go();
  }

  // Repeated INT column written by the Parquet v2 writer; only the row count
  // is checked here, not the values.
  @Test
  public void testSelectRepeatedInt() throws Exception {
    // DRILL-8458
    String query = "select repeatedInt as r from %s";
    testBuilder()
      .sqlQuery(query, "cp.`parquet/parquet_v2_repeated_int.parquet`")
      .unOrdered()
      .expectsNumRecords(100)
      .go();
  }
}
|
apache/systemds | 36,024 | src/main/java/org/apache/sysds/runtime/compress/lib/CLALibLeftMultBy.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.sysds.runtime.compress.lib;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.sysds.runtime.compress.CompressedMatrixBlock;
import org.apache.sysds.runtime.compress.DMLCompressionException;
import org.apache.sysds.runtime.compress.colgroup.AColGroup;
import org.apache.sysds.runtime.compress.colgroup.APreAgg;
import org.apache.sysds.runtime.compress.colgroup.dictionary.AIdentityDictionary;
import org.apache.sysds.runtime.compress.colgroup.dictionary.IdentityDictionary;
import org.apache.sysds.runtime.data.DenseBlock;
import org.apache.sysds.runtime.data.SparseBlock;
import org.apache.sysds.runtime.functionobjects.Plus;
import org.apache.sysds.runtime.matrix.data.LibMatrixBincell;
import org.apache.sysds.runtime.matrix.data.LibMatrixMult;
import org.apache.sysds.runtime.matrix.data.LibMatrixReorg;
import org.apache.sysds.runtime.matrix.data.MatrixBlock;
import org.apache.sysds.runtime.matrix.operators.BinaryOperator;
import org.apache.sysds.runtime.util.CommonThreadPool;
import org.apache.sysds.utils.stats.Timing;
public final class CLALibLeftMultBy {
private static final Log LOG = LogFactory.getLog(CLALibLeftMultBy.class.getName());
// /** Reusable cache intermediate double array for temporary lmm */
// private static ThreadLocal<Pair<Boolean, double[]>> cacheIntermediate = null;
private CLALibLeftMultBy() {
  // private constructor: this is a static utility class and must not be instantiated
}
/**
 * Left multiplication with a CompressedMatrixBlock on the right, computing:
 *
 * <p>
 * ret = t(left) %*% right
 * </p>
 *
 * The uncompressed left input is explicitly transposed and then forwarded to the
 * non-transposed left multiplication.
 *
 * @param right A CompressedMatrixBlock on the right side of the multiplication.
 * @param left  A not transposed MatrixBlock.
 * @param ret   Result allocation to reuse if suitable, otherwise a new block is returned. May be null.
 * @param k     The number of threads allowed to be used
 * @return The result of the matrix multiplication
 */
public static MatrixBlock leftMultByMatrixTransposed(CompressedMatrixBlock right, MatrixBlock left, MatrixBlock ret,
  int k) {
  if(left.isEmpty() || right.isEmpty())
    return prepareEmptyReturnMatrix(right, left, ret, true);
  // only worth warning about when there is more than a single column to transpose
  if(left.getNumColumns() > 1)
    LOG.warn("Transposing matrix block for transposed left matrix multiplication");
  final MatrixBlock leftT = new MatrixBlock(left.getNumColumns(), left.getNumRows(), false);
  LibMatrixReorg.transpose(left, leftT, k);
  return leftMultByMatrix(right, leftT, ret, k);
}
/**
 * Left multiplication with two CompressedMatrixBlocks, computing:
 *
 * <p>
 * ret = t(left) %*% right
 * </p>
 *
 * The left block is not physically transposed; it is treated as transposed inside the kernel.
 *
 * @param right A CompressedMatrixBlock on the right side of the multiplication.
 * @param left  A not transposed CompressedMatrixBlock, logically treated as transposed.
 * @param ret   Result allocation to reuse if suitable, otherwise a new block is returned. May be null.
 * @param k     The number of threads allowed to be used
 * @return The result of the matrix multiplication
 */
public static MatrixBlock leftMultByMatrixTransposed(CompressedMatrixBlock right, CompressedMatrixBlock left,
  MatrixBlock ret, int k) {
  try {
    if(left.isEmpty() || right.isEmpty())
      return prepareEmptyReturnMatrix(right, left, ret, true);
    final MatrixBlock out = prepareReturnMatrix(right, left, ret, true);
    leftMultByCompressedTransposedMatrix(right, left, out, k);
    return out;
  }
  catch(Exception e) {
    throw new DMLCompressionException("Failed CLA Compressed Transposed LMM", e);
  }
}
/**
 * Left multiplication with an uncompressed left-hand side, computing:
 *
 * ret = left %*% right
 *
 * @param right A CompressedMatrixBlock on the right side of the multiplication.
 * @param left  A MatrixBlock on the left side of the equation
 * @param ret   Result allocation to reuse if suitable, otherwise a new block is returned. May be null.
 * @param k     The number of threads allowed to be used
 * @return The result of the matrix multiplication
 */
public static MatrixBlock leftMultByMatrix(CompressedMatrixBlock right, MatrixBlock left, MatrixBlock ret, int k) {
  try {
    if(left.isEmpty() || right.isEmpty())
      return prepareEmptyReturnMatrix(right, left, ret, false);
    // selection matrices (one nonzero per row) have a dedicated fast path
    if(CLALibSelectionMult.isSelectionMatrix(left))
      return CLALibSelectionMult.leftSelection(right, left, ret, k);
    final MatrixBlock out = prepareReturnMatrix(right, left, ret, false);
    return LMM(right.getColGroups(), left, out, k, right.isOverlapping());
  }
  catch(Exception e) {
    throw new DMLCompressionException("Failed CLA LMM", e);
  }
}
/**
 * Prepare an empty (all-zero, sparse) output block of the correct dimensions,
 * reusing {@code ret} when it already matches and is allocated.
 */
private static MatrixBlock prepareEmptyReturnMatrix(MatrixBlock m1, MatrixBlock m2, MatrixBlock ret,
  boolean doTranspose) {
  final int rows = doTranspose ? m2.getNumColumns() : m2.getNumRows();
  final int cols = m1.getNumColumns();
  if(ret == null)
    return new MatrixBlock(rows, cols, true, 0);
  final boolean reusable = ret.getNumColumns() == cols && ret.getNumRows() == rows && ret.isAllocated();
  if(!reusable)
    ret.reset(rows, cols, true, 0);
  return ret;
}
/**
 * Prepare a dense output block of the correct dimensions for the multiplication result,
 * reusing {@code ret} when it already matches and is allocated. The dense block is always allocated.
 */
private static MatrixBlock prepareReturnMatrix(MatrixBlock m1, MatrixBlock m2, MatrixBlock ret,
  boolean doTranspose) {
  final int rows = doTranspose ? m2.getNumColumns() : m2.getNumRows();
  final int cols = m1.getNumColumns();
  if(ret == null)
    ret = new MatrixBlock(rows, cols, false, rows * cols);
  else {
    final boolean reusable = ret.getNumColumns() == cols && ret.getNumRows() == rows && ret.isAllocated();
    if(!reusable)
      ret.reset(rows, cols, false, rows * cols);
  }
  ret.allocateDenseBlock();
  return ret;
}
/** Dispatch compressed-transposed LMM to the parallel kernel only when more than one thread is allowed. */
private static MatrixBlock leftMultByCompressedTransposedMatrix(CompressedMatrixBlock right,
  CompressedMatrixBlock left, final MatrixBlock ret, int k) throws Exception {
  return k > 1 //
    ? leftMultByCompressedTransposedMatrixParallel(right, left, ret, k) //
    : leftMultByCompressedTransposedMatrixSingleThread(right, left, ret);
}
/**
 * Parallel kernel for t(left) %*% right with both sides compressed.
 *
 * Each left column group gets its own task that multiplies it against every right group into a
 * private temporary block; the temporaries are then summed into ret. Shared constant vectors are
 * filtered out of both sides up front and corrected for via outer products.
 *
 * @param right compressed right-hand side
 * @param left  compressed left-hand side (logically transposed)
 * @param ret   preallocated output, forced dense here
 * @param k     thread budget for the shared pool
 * @return ret with the accumulated product
 */
private static MatrixBlock leftMultByCompressedTransposedMatrixParallel(CompressedMatrixBlock right,
  CompressedMatrixBlock left, final MatrixBlock ret, int k) throws Exception {
  final int sd = right.getNumRows(); // shared dim
  final int cr = right.getNumColumns();
  final int rl = left.getNumColumns();
  final List<AColGroup> rightCG = right.getColGroups();
  final List<AColGroup> leftCG = left.getColGroups();
  // Filter out shared constant vectors; cR/cL hold the extracted constants per column.
  final boolean containsRight = CLALibUtils.shouldPreFilter(rightCG);
  final double[] cR = containsRight ? new double[cr] : null;
  final List<AColGroup> fRight = CLALibUtils.filterGroups(rightCG, cR);
  final boolean containsLeft = CLALibUtils.shouldPreFilter(leftCG);
  final double[] cL = containsLeft ? new double[rl] : null;
  final List<AColGroup> fLeft = CLALibUtils.filterGroups(leftCG, cL);
  // Force dense output
  ret.allocateDenseBlock();
  ret.setNonZeros((long) ret.getNumRows() * ret.getNumColumns());
  final ExecutorService pool = CommonThreadPool.get(k);
  try {
    // One task per filtered left group; each accumulates into its own temporary block.
    final List<Future<MatrixBlock>> t = new ArrayList<>();
    for(int j = 0; j < fLeft.size(); j++) {
      final int jj = j;
      t.add(pool.submit(() -> {
        MatrixBlock retT = new MatrixBlock(ret.getNumRows(), ret.getNumColumns(), false);
        retT.allocateDenseBlock();
        for(int i = 0; i < fRight.size(); i++) {
          fRight.get(i).leftMultByAColGroup(fLeft.get(jj), retT, sd);
        }
        retT.examSparsity(true);
        return retT;
      }));
    }
    // Corrections for the filtered constants are applied directly to ret while the tasks run.
    if(containsLeft && containsRight)
      // if both -- multiply the left and right vectors scaling by number of shared dim
      outerProductWithScaling(cL, cR, sd, ret);
    if(containsLeft) // if left -- multiply left with right sum
      for(Future<?> f : outerProductParallelTasks(cL, CLALibUtils.getColSum(fRight, cr, sd), ret, pool))
        f.get();
    if(containsRight)// if right -- multiply right with left sum
      for(Future<?> f : outerProductParallelTasks(CLALibUtils.getColSum(fLeft, rl, sd), cR, ret, pool))
        f.get();
    // Sum the per-task temporaries into ret; dense contiguous blocks use the fast vector add.
    for(Future<MatrixBlock> f : t) {
      MatrixBlock mb = f.get();
      if(!mb.isEmpty()) {
        if(mb.isInSparseFormat())
          LibMatrixBincell.bincellOpInPlaceRight(ret, mb, new BinaryOperator(Plus.getPlusFnObject()));
        else if(mb.getDenseBlock().isContiguous()) {
          final double[] retV = ret.getDenseBlockValues();
          LibMatrixMult.vectAdd(mb.getDenseBlockValues(), retV, 0, 0, retV.length);
        }
        else
          LibMatrixBincell.bincellOpInPlaceRight(ret, mb, new BinaryOperator(Plus.getPlusFnObject()));
      }
    }
    ret.recomputeNonZeros(k);
  }
  finally {
    pool.shutdown();
  }
  return ret;
}
/**
 * Single-threaded kernel for t(left) %*% right with both sides compressed: pairwise
 * group-by-group multiplication followed by outer-product corrections for pre-filtered constants.
 */
private static MatrixBlock leftMultByCompressedTransposedMatrixSingleThread(CompressedMatrixBlock right,
  CompressedMatrixBlock left, final MatrixBlock ret) {
  final int sharedDim = right.getNumRows();
  final int nColRight = right.getNumColumns();
  final int nColLeft = left.getNumColumns();
  final List<AColGroup> rightGroups = right.getColGroups();
  final List<AColGroup> leftGroups = left.getColGroups();
  // Extract shared constant vectors from either side before multiplying.
  final boolean filterRight = CLALibUtils.shouldPreFilter(rightGroups);
  final double[] constRight = filterRight ? new double[nColRight] : null;
  final List<AColGroup> fRight = CLALibUtils.filterGroups(rightGroups, constRight);
  final boolean filterLeft = CLALibUtils.shouldPreFilter(leftGroups);
  final double[] constLeft = filterLeft ? new double[nColLeft] : null;
  final List<AColGroup> fLeft = CLALibUtils.filterGroups(leftGroups, constLeft);
  // The result is computed densely.
  ret.setNonZeros((long) ret.getNumRows() * ret.getNumColumns());
  ret.allocateDenseBlock();
  // Pairwise multiplication of every left group with every right group.
  for(int j = 0; j < fLeft.size(); j++)
    for(int i = 0; i < fRight.size(); i++)
      fRight.get(i).leftMultByAColGroup(fLeft.get(j), ret, sharedDim);
  // Correction terms for the filtered-out constants.
  if(filterLeft && filterRight)
    outerProductWithScaling(constLeft, constRight, sharedDim, ret);
  if(filterLeft)
    outerProductSingleThread(constLeft, CLALibUtils.getColSum(fRight, nColRight, sharedDim), ret);
  if(filterRight)
    outerProductSingleThread(CLALibUtils.getColSum(fLeft, nColLeft, sharedDim), constRight, ret);
  ret.recomputeNonZeros();
  return ret;
}
/**
 * Core left-multiplication kernel: multiplies the uncompressed matrix {@code that} with the given
 * compressed column groups into {@code ret}.
 *
 * When a shared constant vector can be pre-filtered out of the groups, the kernel also computes
 * per-row sums of {@code that} and applies the correction outer-product rowSums %*% constV at the end.
 *
 * @param colGroups   the compressed column groups of the right-hand side
 * @param that        the uncompressed left-hand side
 * @param ret         preallocated output block
 * @param k           thread budget
 * @param overlapping whether the column groups overlap (forces temporary outputs when parallel)
 * @return ret with the final product
 */
private static MatrixBlock LMM(List<AColGroup> colGroups, MatrixBlock that, MatrixBlock ret, int k,
  boolean overlapping) throws Exception {
  final int numColumnsOut = ret.getNumColumns();
  final int lr = that.getNumRows();
  final boolean shouldFilter = CLALibUtils.shouldPreFilter(colGroups);
  // groups are split into those multiplied directly and those using pre-aggregation
  final List<AColGroup> noPreAggGroups = new ArrayList<>();
  final List<APreAgg> preAggGroups = new ArrayList<>();
  if(shouldFilter) {
    // Timing t = new Timing();
    final double[] constV;
    // if(CLALibUtils.alreadyPreFiltered(colGroups, ret.getNumColumns())) {
    // constV = CLALibUtils.filterGroupsAndSplitPreAggOneConst(colGroups, noPreAggGroups, preAggGroups);
    // }
    // else {
    constV = new double[numColumnsOut]; // millions of columns...
    CLALibUtils.filterGroupsAndSplitPreAgg(colGroups, constV, noPreAggGroups, preAggGroups);
    // }
    // final double filterGroupsTime = t.stop();
    // Sort so that the big expensive preAgg groups are first to balance threads
    // if(k * 2 < colGroups.size())
    // Collections.sort(preAggGroups, Comparator.comparing(AColGroup::getNumValues).reversed());
    // rowSums of the left input are needed to correct for the filtered constant vector
    final double[] rowSums;
    if(!noPreAggGroups.isEmpty() || !preAggGroups.isEmpty()) {
      final int sizeSum = preAggGroups.size() + noPreAggGroups.size();
      rowSums = new double[lr];
      // the row sums are computed inside the multiplication kernels to reuse the pass over `that`
      if(k == 1 || sizeSum == 1)
        LMMTaskExec(noPreAggGroups, preAggGroups, that, ret, 0, lr, rowSums);
      else
        LMMParallel(noPreAggGroups, preAggGroups, that, ret, rowSums, overlapping, k);
    }
    else
      rowSums = that.rowSum(k).getDenseBlockValues();
    // add the correction layer for the subtracted common values.
    if(rowSums != null) {
      if(ret.isEmpty())
        ret.allocateDenseBlock();
      else
        ret.sparseToDense();
      outerProduct(rowSums, constV, ret, k);
    }
  }
  else {
    CLALibUtils.splitPreAgg(colGroups, noPreAggGroups, preAggGroups);
    // Sort so that the big expensive preAgg groups are first.
    // Collections.sort(preAggGroups, Comparator.comparing(AColGroup::getNumValues).reversed());
    if(k == 1 || colGroups.size() == 1)
      LMMTaskExec(noPreAggGroups, preAggGroups, that, ret, 0, lr, null);
    else
      LMMParallel(noPreAggGroups, preAggGroups, that, ret, null, overlapping, k);
  }
  ret.recomputeNonZeros(k);
  ret.examSparsity(k);
  return ret;
}
/**
 * Parallel LMM driver: decides between accumulating directly into ret or via per-task
 * temporary outputs, and manages the shared thread pool.
 */
private static void LMMParallel(List<AColGroup> npa, List<APreAgg> pa, MatrixBlock that, MatrixBlock ret,
  double[] rowSums, boolean overlapping, int k, ExecutorService pool) throws Exception {
}
private static void LMMParallel(List<AColGroup> npa, List<APreAgg> pa, MatrixBlock that, MatrixBlock ret,
  double[] rowSums, boolean overlapping, int k) throws Exception {
  final ExecutorService pool = CommonThreadPool.get(k);
  try {
    final int nGroups = npa.size() + pa.size();
    // Temporaries are required when overlapping groups would race on the same output cells,
    // and are preferred when threads far outnumber groups on a narrow output.
    final boolean useTmp = (overlapping && nGroups > 1) //
      || (nGroups * 2 < k && ret.getNumColumns() < 1000);
    if(useTmp)
      LMMParallelTempOut(npa, pa, that, ret, rowSums, overlapping, k, pool);
    else
      LMMParallelNoTempOut(npa, pa, that, ret, rowSums, overlapping, k, pool);
  }
  finally {
    pool.shutdown();
  }
}
/**
 * Parallel LMM writing directly into ret: the rows of the left input are split into blocks,
 * and within each row block the pre-aggregate groups are strided over at most k tasks.
 */
private static void LMMParallelNoTempOut(List<AColGroup> npa, List<APreAgg> pa, MatrixBlock that, MatrixBlock ret,
  double[] rowSums, boolean overlapping, int k, ExecutorService pool) throws Exception {
  final int paTasks = Math.min(pa.size(), k);
  final int nRows = that.getNumRows();
  final int nCols = that.getNumColumns();
  final int rowBlockSize = Math.max(nRows / k, 1);
  final ArrayList<Future<?>> tasks = new ArrayList<>();
  // submit all tasks per row block, results go directly into ret
  for(int rs = 0; rs < nRows; rs += rowBlockSize) {
    final int re = Math.min(rs + rowBlockSize, nRows);
    LLMNoTempOutRowBlockTasks(npa, pa, that, ret, rowSums, pool, paTasks, nCols, tasks, rs, re, k);
  }
  for(Future<?> f : tasks)
    f.get();
}
/** Submit the tasks belonging to one row block [start, end) of the no-temp-output parallel LMM. */
private static void LLMNoTempOutRowBlockTasks(List<AColGroup> npa, List<APreAgg> pa, MatrixBlock that,
  MatrixBlock ret, double[] rowSums, ExecutorService pool, final int s, final int ct,
  final ArrayList<Future<?>> tasks, final int start, final int end, int k) {
  // one task (or sub-task tree) per non-pre-aggregate group
  for(AColGroup g : npa)
    noTmpNoAggGroups(that, ret, pool, ct, tasks, start, end, g, k);
  // pre-aggregate groups are strided across s tasks (offset o, stride s),
  // so at most k tasks exist without allocating extra output arrays
  for(int o = 0; o < s; o++) {
    final int offT = o;
    tasks.add(pool.submit(() -> LMMWithPreAgg(pa, that, ret, start, end, 0, ct, offT, s, null)));
  }
  // the pre-filter correction row sums get a dedicated task
  if(rowSums != null)
    tasks.add(pool.submit(() -> rowSum(that, rowSums, start, end, 0, ct)));
}
/**
 * Schedule the multiplication of one non-pre-aggregate group for rows [start, end).
 *
 * For outputs below one million columns the work is split over column blocks into private
 * temporary blocks that are summed into ret by a follow-up task; for wider outputs the group
 * multiplies directly into ret to avoid huge temporary allocations.
 */
private static void noTmpNoAggGroups(MatrixBlock that, MatrixBlock ret, ExecutorService pool, final int ct,
  final ArrayList<Future<?>> tasks, final int start, final int end, AColGroup g, int k) {
  final List<Future<MatrixBlock>> npaSubTask = new ArrayList<>();
  final int retNRow = ret.getNumRows();
  final int retNCol = ret.getNumColumns();
  if(retNCol < 1000000) {
    // column blocks of at least 64000 columns; at most ~k/2 blocks
    final int colBlockSize = Math.max(ct / Math.max(k, 2), 64000);
    for(int bloC = 0; bloC < ct; bloC += colBlockSize) {
      final int startC = bloC;
      final int endC = Math.min(bloC + colBlockSize, ct);
      npaSubTask.add(pool.submit(() -> {
        Timing t = new Timing();
        // private full-size temporary output for this column block
        final double[] tmp = new double[retNRow * retNCol];
        final MatrixBlock tmpBlock = new MatrixBlock(retNRow, retNCol, tmp);
        g.leftMultByMatrixNoPreAgg(that, tmpBlock, start, end, startC, endC);
        LOG.debug("noPreAggTiming: " + t);
        return tmpBlock;
      }));
    }
    // accumulation task waits on all sub-tasks of this group and folds them into ret
    tasks.add(pool.submit(() -> addInPlaceFuture(ret, npaSubTask)));
  }
  else {
    // too wide for temporaries: multiply straight into ret
    tasks.add(pool.submit(() -> g.leftMultByMatrixNoPreAgg(that, ret, start, end, 0, ct)));
  }
}
/** Block on each future in order and accumulate its dense result into ret. Always returns null. */
private static Object addInPlaceFuture(MatrixBlock ret, List<Future<MatrixBlock>> npaSubTask) throws Exception {
  for(Future<MatrixBlock> partial : npaSubTask) {
    addInPlace(partial.get(), ret);
  }
  return null;
}
/**
 * Parallel LMM where every task writes into its own temporary output block, which are all summed
 * into ret at the end. Used when groups overlap (direct accumulation would race) or when threads
 * far outnumber groups.
 *
 * The thread budget k is successively divided between row blocks, group tasks, and column blocks.
 */
private static void LMMParallelTempOut(List<AColGroup> npa, List<APreAgg> pa, MatrixBlock that, MatrixBlock ret,
  double[] rowSums, boolean overlapping, int k, ExecutorService pool) throws Exception {
  final int rt = that.getNumRows();
  final int ct = that.getNumColumns();
  // perfect parallel over rows left.
  final int rowBlockSize = Math.max(rt / k, 1);
  final int threadsUsedOnRows = (int) Math.ceil((double) rt / rowBlockSize);
  k = Math.max(1, k / threadsUsedOnRows);
  // parallel over column blocks ... should be bigger than largest distinct.
  // final int colBlockSize = Math.max(ct, 1);
  final int s = Math.min(npa.size() + pa.size(), k);
  k = Math.max(1, k / s); //
  // We set it to minimum 4k
  final int colBlockSize = Math.max(ct / k, 64000);
  final int threadsUsedOnColBlocks = (int) Math.ceil((double) ct / colBlockSize);
  // NOTE(review): this final reduction of k is not read below -- confirm it is intentional dead code.
  k = k / threadsUsedOnColBlocks;
  final ArrayList<Future<MatrixBlock>> tasks = new ArrayList<>();
  // allocate temp
  final int retCols = ret.getNumColumns();
  final int retRows = ret.getNumRows();
  for(int blo = 0; blo < rt; blo += rowBlockSize) {
    final int start = blo;
    final int end = Math.min(blo + rowBlockSize, rt);
    for(AColGroup g : npa) // all groups get their own task
      tasks.add(pool.submit(new LMMNoPreAggTask(g, that, retRows, retCols, start, end)));
    for(int off = 0; off < s; off++) { // only allocate k tasks at max
      final int offT = off;
      if(that.isInSparseFormat()) {
        // sparse input: no column blocking, one strided task per offset
        tasks.add(pool.submit(new LMMPreAggTask(pa, that, retRows, retCols, start, end, 0, ct, offT, s, null)));
      }
      else {
        // dense input: additionally block over columns
        for(int bloC = 0; bloC < ct; bloC += colBlockSize) {
          final int startC = bloC;
          final int endC = Math.min(startC + colBlockSize, ct);
          tasks.add(pool
            .submit(new LMMPreAggTask(pa, that, retRows, retCols, start, end, startC, endC, offT, s, null)));
        }
      }
    }
    if(rowSums != null) // row sums task
      tasks.add(pool.submit(new LMMRowSums(that, start, end, rowSums)));
  }
  // sum all temporary outputs into ret (LMMRowSums tasks return null and are skipped)
  addInPlaceFuture(ret, tasks);
}
/**
 * Element-wise add the dense block of {@code a} into the dense block of {@code out}.
 * A null {@code a} is a no-op. Always returns null (Callable-friendly signature).
 */
private static Object addInPlace(MatrixBlock a, MatrixBlock out) throws Exception {
  if(a == null)
    return null;
  final DenseBlock src = a.getDenseBlock();
  final DenseBlock dst = out.getDenseBlock();
  // iterate the backing arrays block by block; layouts are assumed to match
  final int nBlocks = src.numBlocks();
  for(int b = 0; b < nBlocks; b++) {
    final double[] s = src.valuesAt(b);
    final double[] d = dst.valuesAt(b);
    for(int i = 0; i < s.length; i++)
      d[i] += s[i];
  }
  return null;
}
/**
 * Sequential LMM over rows [rl, ru): multiplies all groups and (optionally) accumulates
 * the row sums used for the pre-filter correction.
 */
private static void LMMTaskExec(List<AColGroup> npa, List<APreAgg> pa, MatrixBlock that, MatrixBlock ret, int rl,
  int ru, double[] rowSums) throws Exception {
  final int cu = that.getNumColumns();
  if(npa.isEmpty() && pa.isEmpty()) {
    // nothing to multiply; only the row sums are needed
    rowSum(that, rowSums, rl, ru, 0, cu);
    return;
  }
  // process rows in small blocks of 4
  for(int rs = rl; rs < ru; rs += 4) {
    final int re = Math.min(rs + 4, ru);
    // direct multiplications first
    for(AColGroup g : npa)
      g.leftMultByMatrixNoPreAgg(that, ret, rs, re, 0, cu);
    // then the pre-aggregated groups (also folds in row sums if requested)
    if(!pa.isEmpty())
      LMMWithPreAgg(pa, that, ret, rs, re, 0, cu, 0, 1, rowSums);
  }
}
/** Dispatch the correction outer product to the single-threaded or pooled implementation. */
private static void outerProduct(final double[] leftRowSum, final double[] rightColumnSum, final MatrixBlock result,
  int k) throws InterruptedException, ExecutionException {
  if(k <= 1)
    outerProductSingleThread(leftRowSum, rightColumnSum, result);
  else
    outerProductParallel(leftRowSum, rightColumnSum, result, k);
}
/** Run the outer-product tile tasks on a fresh pool of k threads and wait for completion. */
private static void outerProductParallel(final double[] leftRowSum, final double[] rightColumnSum,
  final MatrixBlock result, int k) throws InterruptedException, ExecutionException {
  final ExecutorService pool = CommonThreadPool.get(k);
  try {
    final List<Future<?>> tasks = outerProductParallelTasks(leftRowSum, rightColumnSum, result, pool);
    for(Future<?> task : tasks)
      task.get();
  }
  finally {
    pool.shutdown();
  }
}
/** Add the outer-product tile [rl,ru) x [cl,cu) into result, picking the layout-specific kernel. */
private static void outerProductRange(final double[] leftRowSum, final double[] rightColumnSum,
  final MatrixBlock result, int rl, int ru, int cl, int cu) {
  final DenseBlock db = result.getDenseBlock();
  if(db.isContiguous())
    outerProductRangeContiguous(leftRowSum, rightColumnSum, result.getDenseBlockValues(), rl, ru, cl, cu);
  else
    outerProductRangeGeneric(leftRowSum, rightColumnSum, db, rl, ru, cl, cu);
}
/** Outer-product tile kernel for a single contiguous backing array; zero rows are skipped. */
private static void outerProductRangeContiguous(final double[] leftRowSum, final double[] rightColumnSum,
  final double[] result, int rl, int ru, int cl, int cu) {
  final int nCols = rightColumnSum.length;
  for(int row = rl; row < ru; row++) {
    final double vLeft = leftRowSum[row];
    if(vLeft == 0)
      continue; // a zero row contributes nothing
    final int offOut = nCols * row;
    for(int col = cl; col < cu; col++)
      result[offOut + col] += vLeft * rightColumnSum[col];
  }
}
/** Outer-product tile kernel for a non-contiguous dense block; resolves each row's array and offset. */
private static void outerProductRangeGeneric(final double[] leftRowSum, final double[] rightColumnSum,
  final DenseBlock res, int rl, int ru, int cl, int cu) {
  for(int row = rl; row < ru; row++) {
    final double vLeft = leftRowSum[row];
    if(vLeft == 0)
      continue; // a zero row contributes nothing
    final int offOut = res.pos(row);
    final double[] result = res.values(row);
    for(int col = cl; col < cu; col++)
      result[offOut + col] += vLeft * rightColumnSum[col];
  }
}
/** Single-threaded outer product, tiled into the same cache-friendly blocks as the parallel variant. */
private static void outerProductSingleThread(final double[] leftRowSum, final double[] rightColumnSum,
  MatrixBlock result) {
  final int blkz = 1024;
  for(int rs = 0; rs < leftRowSum.length; rs += blkz) {
    final int re = Math.min(leftRowSum.length, rs + blkz);
    // a short trailing row block gets a compensating wider column block
    final int colBlk = outerProdGetColBz(blkz, rs, rs, re);
    for(int cs = 0; cs < rightColumnSum.length; cs += colBlk) {
      final int ce = Math.min(rightColumnSum.length, cs + colBlk);
      outerProductRange(leftRowSum, rightColumnSum, result, rs, re, cs, ce);
    }
  }
}
/** Submit one task per outer-product tile (1024-row windows) and return the futures. */
private static List<Future<?>> outerProductParallelTasks(final double[] leftRowSum, final double[] rightColumnSum,
  final MatrixBlock result, ExecutorService pool) {
  // windows of 1024 each
  final int blkz = 1024;
  final List<Future<?>> tasks = new ArrayList<>();
  for(int rs = 0; rs < leftRowSum.length; rs += blkz) {
    final int rl = rs;
    final int ru = Math.min(leftRowSum.length, rs + blkz);
    final int colBlk = outerProdGetColBz(blkz, rs, rl, ru);
    for(int cs = 0; cs < rightColumnSum.length; cs += colBlk) {
      final int cl = cs;
      final int cu = Math.min(rightColumnSum.length, cs + colBlk);
      tasks.add(pool.submit(() -> outerProductRange(leftRowSum, rightColumnSum, result, rl, ru, cl, cu)));
    }
  }
  return tasks;
}
/**
 * Column block size for one outer-product row window: full row blocks use a square
 * blkz x blkz tile; a short trailing row block is given a compensating wider column
 * block so each tile keeps roughly the same amount of work.
 */
private static int outerProdGetColBz(final int blkz, int row, final int rl, final int ru) {
  if(ru >= row + blkz)
    return blkz;
  return 1024 * 1024 - ((ru - rl) * 1024) + 1024;
}
/** Scaled outer product leftRowSum %*% rightColumnSum * scaling, dispatched on the dense layout. */
private static void outerProductWithScaling(final double[] leftRowSum, final double[] rightColumnSum,
  final int scaling, final MatrixBlock result) {
  final DenseBlock db = result.getDenseBlock();
  if(db.isContiguous())
    outerProductWithScalingContiguous(leftRowSum, rightColumnSum, scaling, result.getDenseBlockValues());
  else
    outerProductWithScalingGeneric(leftRowSum, rightColumnSum, scaling, db);
}
/** Scaled outer-product kernel for a single contiguous backing array. */
private static void outerProductWithScalingContiguous(final double[] leftRowSum, final double[] rightColumnSum,
  final int scaling, final double[] result) {
  final int nCols = rightColumnSum.length;
  for(int row = 0; row < leftRowSum.length; row++) {
    final double scaledLeft = leftRowSum[row] * scaling;
    final int offOut = nCols * row;
    for(int col = 0; col < nCols; col++)
      result[offOut + col] += scaledLeft * rightColumnSum[col];
  }
}
/** Scaled outer-product kernel for a non-contiguous dense block. */
private static void outerProductWithScalingGeneric(final double[] leftRowSum, final double[] rightColumnSum,
  final int scaling, final DenseBlock res) {
  final int nCols = rightColumnSum.length;
  for(int row = 0; row < leftRowSum.length; row++) {
    final int offOut = res.pos(row);
    final double[] result = res.values(row);
    final double scaledLeft = leftRowSum[row] * scaling;
    for(int col = 0; col < nCols; col++)
      result[offOut + col] += scaledLeft * rightColumnSum[col];
  }
}
/**
 * Multiply the pre-aggregate column groups with rows [rl, ru) / columns [cl, cu) of the left
 * input, handling groups off, off+skip, off+2*skip, ... and optionally accumulating row sums.
 */
private static void LMMWithPreAgg(List<APreAgg> preAggCGs, MatrixBlock that, MatrixBlock ret, int rl, int ru, int cl,
  int cu, int off, int skip, double[] rowSums) {
  try {
    if(that.isInSparseFormat())
      LMMWithPreAggSparse(preAggCGs, that, ret, rl, ru, cl, cu, off, skip, rowSums);
    else
      LMMWithPreAggDense(preAggCGs, that, ret, rl, ru, cl, cu, off, skip, rowSums);
  }
  catch(Exception e) {
    throw new RuntimeException("Failed LLM pre aggregate", e);
  }
}
/** Sparse-input pre-aggregate LMM: per group (strided by skip), pre-aggregate each row and multiply. */
private static void LMMWithPreAggSparse(List<APreAgg> preAggCGs, MatrixBlock that, MatrixBlock ret, int rl, int ru,
  int cl, int cu, int off, int skip, double[] rowSum) throws Exception {
  // scratch blocks, lazily allocated inside preAggSparseRow and reused across rows/groups
  final MatrixBlock preA = new MatrixBlock();
  final MatrixBlock fTmp = new MatrixBlock();
  final SparseBlock sb = that.getSparseBlock();
  // strided iteration: this call handles groups off, off+skip, off+2*skip, ...
  for(int j = off; j < preAggCGs.size(); j += skip) {
    final APreAgg g = preAggCGs.get(j);
    final int nCol = g.getNumCols();
    final int nVal = g.getNumValues();
    for(int r = rl; r < ru; r++)
      preAggSparseRow(that, ret, cl, cu, preA, fTmp, sb, nCol, nVal, g, r);
  }
  if(rowSum != null)
    rowSumSparse(sb, rowSum, rl, ru, cl, cu);
}
/**
 * Pre-aggregate one sparse row r of the left input for group g and multiply the aggregate with the
 * group's dictionary into ret.
 *
 * preA and fTmp are caller-owned scratch blocks, (re)allocated here on first use and reset between
 * rows so their buffers are reused.
 */
private static void preAggSparseRow(MatrixBlock that, MatrixBlock ret, int cl, int cu, final MatrixBlock preA,
  final MatrixBlock fTmp, final SparseBlock sb, final int nCol, final int nVal, final APreAgg g, int r) {
  if(sb.isEmpty(r))
    return;
  final int rcu = r + 1;
  // disabled heuristic: multiply directly without pre-aggregation for very sparse rows
  // if(sb.size(r) * nCol < sb.size(r) + (long) nCol * nVal) {
  // g.leftMultByMatrixNoPreAgg(that, ret, r, rcu, cl, cu);
  // }
  // else {
  // lazily allocate the 1 x nVal pre-aggregate scratch block, else just reset it
  if(!preA.isAllocated()) {
    preA.reset(1, nVal);
    preA.allocateDenseBlock();
  }
  else
    preA.reset(1, nVal);
  allocateOrResetTmpRes(ret, fTmp, 1);
  final double[] preAV = preA.getDenseBlockValues();
  // nonzero counts are set up front so downstream kernels do not treat the blocks as empty
  preA.setNonZeros(g.getPreAggregateSize());
  fTmp.setNonZeros(1);
  g.preAggregateSparse(sb, preAV, r, rcu, cl, cu);
  g.mmWithDictionary(preA, fTmp, ret, 1, r, rcu);
  // }
}
/** Reset the scratch result block to rows x ret-columns, allocating its dense block on first use. */
private static void allocateOrResetTmpRes(final MatrixBlock ret, final MatrixBlock fTmp, int rows) {
  if(fTmp.isAllocated()) {
    fTmp.reset(rows, ret.getNumColumns());
  }
  else {
    fTmp.reset(rows, ret.getNumColumns());
    fTmp.allocateDenseBlock();
  }
}
/**
 * Dense-input pre-aggregate LMM with three-level blocking: rows of the left input (blocks of 4),
 * column groups (blocks of 4, strided by skip across threads), and shared-dimension columns
 * (blocks of 2048). Pre-aggregate buffers are reused across row blocks.
 *
 * Groups with identity dictionaries multiply directly into ret during the pre-aggregate phase and
 * are skipped in the post-multiplication.
 */
private static void LMMWithPreAggDense(final List<APreAgg> preAggCGs, final MatrixBlock that, final MatrixBlock ret,
  final int rl, final int ru, final int cl, final int cu, final int off, final int skip, final double[] rowSum)
  throws InterruptedException, ExecutionException {
  // Timing t = new Timing();
  // ExecutorService pool = CommonThreadPool.get(k);
  /** The column block size for preAggregating column groups */
  // final int colBZ = 1024;
  final int colBZ = 2048;
  // final int colBZ = Math.max(1024, lc/2);
  // The number of rows to process together
  final int rowBlockSize = 4;
  // The number of column groups to process together
  // the value should ideally be set so that the colGroups fits into cache together with a row block.
  // currently we only try to avoid having a dangling small number of column groups in the last block.
  // final int colGroupBlocking = preAggCGs.size();// % 16 < 4 ? 20 : 16;
  // final int colGroupBlocking = 8;
  final int colGroupBlocking = 4;
  final int nColGroups = preAggCGs.size();
  // Allocate pre Aggregate Array List
  final double[][] preAgg = new double[colGroupBlocking][];
  // Allocate temporary Result matrix
  // guaranteed to be large enough for all groups
  MatrixBlock tmpRes = new MatrixBlock();
  // For each row block
  for(int rlt = rl; rlt < ru; rlt += rowBlockSize) {
    final int rut = Math.min(rlt + rowBlockSize, ru);
    // For each column group block
    for(int gl = off; gl < nColGroups; gl += colGroupBlocking * skip) {
      final int gu = Math.min(gl + (colGroupBlocking * skip), nColGroups);
      // For each column group in the current block allocate the pre aggregate array.
      // or reset the pre aggregate.
      for(int j = gl, p = 0; j < gu; j += skip, p++)
        preAllocate(preAggCGs, j, rut, rlt, preAgg, p);
      // pre-aggregate over shared-dimension column blocks to stay cache resident
      for(int clt = cl; clt < cu; clt += colBZ) {
        final int cut = Math.min(clt + colBZ, cu);
        for(int j = gl, p = 0; j < gu; j += skip, p++)
          preAggregate(that, ret, preAggCGs, rut, rlt, clt, cut, j, preAgg, p);
        // fold the row sums into the same pass, once per column block (last group block only)
        if(gu == nColGroups)
          rowSum(that, rowSum, rlt, rut, clt, cut);
      }
      // Multiply out the PreAggregate to the output matrix.
      for(int j = gl, p = 0; j < gu; j += skip, p++) {
        final APreAgg cg = preAggCGs.get(j);
        // identity dictionaries already multiplied directly into ret in the pre-aggregate phase
        if(cg.getDictionary() instanceof AIdentityDictionary)
          continue;
        allocateOrResetTmpRes(ret, tmpRes, rowBlockSize);
        postMultiply(ret, tmpRes, preAgg, p, cg, rut, rlt);
      }
    }
  }
  // LOG.debug("SingleCallLMMTime: " + t.stop());
}
/**
 * Ensure slot p of the pre-aggregate buffers can hold (rut - rlt) x preAggSize values for group j;
 * reuse and zero the existing array when large enough. Identity-dictionary groups need no buffer.
 */
private static void preAllocate(List<APreAgg> preAggCGs, int j, int rut, int rlt, double[][] preAgg, int p) {
  final APreAgg cg = preAggCGs.get(j);
  if(cg.getDictionary() instanceof AIdentityDictionary)
    return; // identity dictionaries take the direct multiplication path
  final int len = (rut - rlt) * cg.getPreAggregateSize();
  if(preAgg[p] != null && preAgg[p].length >= len)
    Arrays.fill(preAgg[p], 0, len, 0);
  else
    preAgg[p] = new double[len];
}
/**
 * Pre-aggregate rows [rlt, rut) / columns [clt, cut) of the dense left input for group j into
 * buffer slot p; identity-dictionary groups instead multiply directly into ret.
 */
private static void preAggregate(MatrixBlock that, MatrixBlock ret, List<APreAgg> preAggCGs, int rut, int rlt,
  int clt, int cut, int j, double[][] preAgg, int p) {
  final APreAgg cg = preAggCGs.get(j);
  // NOTE(review): this checks the concrete IdentityDictionary while preAllocate and the dense loop
  // skip on the broader AIdentityDictionary; if another AIdentityDictionary subtype exists,
  // preAgg[p] would be null here -- confirm the dictionary hierarchy.
  if(cg.getDictionary() instanceof IdentityDictionary)
    cg.leftMMIdentityPreAggregateDense(that, ret, rlt, rut, clt, cut);
  else
    cg.preAggregateDense(that, preAgg[p], rlt, rut, clt, cut);
}
/** Multiply the pre-aggregate in slot p with group cg's dictionary and accumulate rows [rlt, rut) of ret. */
private static void postMultiply(MatrixBlock ret, MatrixBlock tmpRes, double[][] preAgg, int p, APreAgg cg, int rut,
  int rlt) {
  final int nRows = rut - rlt;
  // wrap the raw pre-aggregate buffer as a (rows x preAggSize) matrix without copying
  final MatrixBlock preAggThis = new MatrixBlock(nRows, cg.getPreAggregateSize(), preAgg[p]);
  cg.mmWithDictionary(preAggThis, tmpRes, ret, 1, rlt, rut);
}
/**
 * Compute row sums of mb over rows [rl, ru) and columns [cl, cu).
 * The returned array is indexed by absolute row, hence its length is ru rather than ru - rl.
 */
public static double[] rowSum(MatrixBlock mb, int rl, int ru, int cl, int cu) {
  final double[] sums = new double[ru];
  rowSum(mb, sums, rl, ru, cl, cu);
  return sums;
}
/** Accumulate row sums of mb into rowSum (no-op when rowSum is null), dispatching on the format. */
private static void rowSum(MatrixBlock mb, double[] rowSum, int rl, int ru, int cl, int cu) {
  if(mb.isEmpty())
    throw new DMLCompressionException("Invalid empty block to rowsum");
  if(rowSum == null)
    return; // caller did not request row sums
  if(mb.isInSparseFormat())
    rowSumSparse(mb.getSparseBlock(), rowSum, rl, ru, cl, cu);
  else
    rowSumDense(mb, rowSum, rl, ru, cl, cu);
}
/** Accumulate sparse row sums over rows [rl, ru), restricted to columns [cl, cu). */
private static void rowSumSparse(SparseBlock sb, double[] rowSum, int rl, int ru, int cl, int cu) {
  for(int r = rl; r < ru; r++)
    rowSumSparseSingleRow(sb, rowSum, cl, cu, r);
}
/** Accumulate the sum of sparse row i over column range [cl, cu) into rowSum[i]. */
private static void rowSumSparseSingleRow(SparseBlock sb, double[] rowSum, int cl, int cu, int i) {
  if(sb.isEmpty(i))
    return;
  final int apos = sb.pos(i);
  final int alen = sb.size(i) + apos;
  final double[] aval = sb.values(i);
  final int[] aix = sb.indexes(i);
  int j = apos;
  // skip entries before the column range
  while(j < alen && aix[j] < cl)
    j++;
  if(aix[alen - 1] < cu)
    // fast path: the whole remainder of the row lies inside [cl, cu), no bound check needed
    while(j < alen)
      rowSum[i] += aval[j++];
  else
    // general path: stop once a column index reaches cu
    while(j < alen && aix[j] < cu)
      rowSum[i] += aval[j++];
}
/** Accumulate dense row sums over rows [rl, ru), restricted to columns [cl, cu). */
private static void rowSumDense(MatrixBlock that, double[] rowSum, int rl, int ru, int cl, int cu) {
  final DenseBlock db = that.getDenseBlock();
  if(db.isContiguous()) {
    // one backing array shared by all rows; resolve it once
    final double[] vals = db.values(0);
    for(int r = rl; r < ru; r++)
      rowSumDenseSingleRow(rowSum, cl, cu, db, vals, r);
  }
  else {
    // rows may live in different backing arrays; resolve per row
    for(int r = rl; r < ru; r++)
      rowSumDenseSingleRow(rowSum, cl, cu, db, db.values(r), r);
  }
}
/** Add the sum of columns [cl, cu) of dense row r (backed by thatV) into rowSum[r]. */
private static void rowSumDenseSingleRow(double[] rowSum, int cl, int cu, final DenseBlock db, final double[] thatV,
  int r) {
  final int base = db.pos(r);
  double acc = 0;
  for(int c = base + cl; c < base + cu; c++)
    acc += thatV[c];
  rowSum[r] += acc;
}
/**
 * Callable that left-multiplies a slice of the uncompressed matrix with a list of
 * pre-aggregatable column groups via {@code LMMWithPreAgg}, writing into a task-private
 * dense result block that the caller merges afterwards.
 */
private static class LMMPreAggTask implements Callable<MatrixBlock> {
	private final List<APreAgg> _pa; // column groups to multiply
	private final MatrixBlock _that; // uncompressed left-hand side
	private final int _retR; // rows of the result block
	private final int _retC; // columns of the result block
	private final int _rl; // row range [_rl, _ru) of _that handled by this task
	private final int _ru;
	private final int _cl; // column range [_cl, _cu) of _that handled by this task
	private final int _cu;
	private final double[] _rowSums; // row-sum accumulator; may be null (see rowSum dispatcher)
	private final int _off; // presumably the starting column-group offset — consumed by LMMWithPreAgg
	private final int _skip; // presumably the stride over column groups — consumed by LMMWithPreAgg

	protected LMMPreAggTask(List<APreAgg> pa, MatrixBlock that, int retR, int retC, int rl, int ru, int cl, int cu,
		int off, int skip, double[] rowSums) {
		_pa = pa;
		_that = that;
		_retR = retR;
		_retC = retC;
		_rl = rl;
		_ru = ru;
		_cl = cl;
		_cu = cu;
		_rowSums = rowSums;
		_off = off;
		_skip = skip;
	}

	@Override
	public MatrixBlock call() throws Exception {
		// Task-private dense output: avoids synchronizing on a shared result block.
		final double[] tmpArr = new double[_retR * _retC];
		MatrixBlock _ret = new MatrixBlock(_retR, _retC, tmpArr);
		LMMWithPreAgg(_pa, _that, _ret, _rl, _ru, _cl, _cu, _off, _skip, _rowSums);
		return _ret;
	}
}
/**
 * Callable that left-multiplies rows [_rl, _ru) of the input with a single column group,
 * without pre-aggregation, into a task-private dense result block.
 */
private static class LMMNoPreAggTask implements Callable<MatrixBlock> {
	private final AColGroup _cg;
	private final MatrixBlock _that;
	private final MatrixBlock _ret;
	private final int _rl;
	private final int _ru;

	protected LMMNoPreAggTask(AColGroup cg, MatrixBlock that, int retR, int retC, int rl, int ru) {
		_cg = cg;
		_that = that;
		_rl = rl;
		_ru = ru;
		// Allocate the dense output up front so call() only has to fill it.
		final MatrixBlock out = new MatrixBlock(retR, retC, false);
		out.allocateDenseBlock();
		_ret = out;
	}

	@Override
	public MatrixBlock call() throws Exception {
		final int nCol = _that.getNumColumns();
		_cg.leftMultByMatrixNoPreAgg(_that, _ret, _rl, _ru, 0, nCol);
		return _ret;
	}
}
/**
 * Callable that accumulates the row sums of rows [_rl, _ru) (all columns) of the input
 * matrix into a shared array. Returns null; the accumulator is the output.
 */
private static class LMMRowSums implements Callable<MatrixBlock> {
	private final MatrixBlock _that;
	private final int _rl;
	private final int _ru;
	private final double[] _rowSums; // indexed by absolute row; disjoint row ranges per task

	protected LMMRowSums(MatrixBlock that, int rl, int ru, double[] rowSums) {
		_that = that;
		_rl = rl;
		_ru = ru;
		_rowSums = rowSums;
	}

	@Override
	public MatrixBlock call() throws Exception {
		final int nCol = _that.getNumColumns();
		if(_that.isInSparseFormat())
			rowSumSparse(_that.getSparseBlock(), _rowSums, _rl, _ru, 0, nCol);
		else
			rowSumDense(_that, _rowSums, _rl, _ru, 0, nCol);
		return null;
	}
}
}
|
apache/hbase | 37,938 | hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompaction.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.regionserver;
import static org.apache.hadoop.hbase.HBaseTestingUtil.START_KEY;
import static org.apache.hadoop.hbase.HBaseTestingUtil.START_KEY_BYTES;
import static org.apache.hadoop.hbase.HBaseTestingUtil.fam1;
import static org.apache.hadoop.hbase.regionserver.Store.PRIORITY_USER;
import static org.apache.hadoop.hbase.regionserver.compactions.CloseChecker.SIZE_LIMIT_KEY;
import static org.apache.hadoop.hbase.regionserver.compactions.CloseChecker.TIME_LIMIT_KEY;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.allOf;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.hasProperty;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.notNullValue;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertThrows;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.when;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.zip.GZIPInputStream;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.ChoreService;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTestConst;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.Waiter;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.regionserver.compactions.CompactionContext;
import org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTracker;
import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequestImpl;
import org.apache.hadoop.hbase.regionserver.compactions.DefaultCompactor;
import org.apache.hadoop.hbase.regionserver.throttle.CompactionThroughputControllerFactory;
import org.apache.hadoop.hbase.regionserver.throttle.NoLimitThroughputController;
import org.apache.hadoop.hbase.regionserver.throttle.ThroughputController;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.Threads;
import org.apache.hadoop.hbase.wal.WAL;
import org.apache.hadoop.io.IOUtils;
import org.junit.After;
import org.junit.Assume;
import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.mockito.Mockito;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.slf4j.LoggerFactory;
/**
* Test compaction framework and common functions
*/
@Category({ RegionServerTests.class, MediumTests.class })
public class TestCompaction {
@ClassRule
public static final HBaseClassTestRule CLASS_RULE =
  HBaseClassTestRule.forClass(TestCompaction.class);

@Rule
public TestName name = new TestName(); // read by setUp() to special-case per-test configuration

private static final HBaseTestingUtil UTIL = new HBaseTestingUtil();
protected Configuration conf = UTIL.getConfiguration();

private HRegion r = null; // region under test; created in setUp(), closed in tearDown()
private TableDescriptor tableDescriptor = null;
private static final byte[] COLUMN_FAMILY = fam1;
private final byte[] STARTROW = Bytes.toBytes(START_KEY);
private static final byte[] COLUMN_FAMILY_TEXT = COLUMN_FAMILY;
private int compactionThreshold; // "hbase.hstore.compactionThreshold"; resolved in the constructor
private byte[] secondRowBytes, thirdRowBytes;
private static final long MAX_FILES_TO_COMPACT = 10;
private final byte[] FAMILY = Bytes.toBytes("cf"); // family used by the seqId / corrupt-block tests
/**
 * Constructor: tunes the shared configuration before the region is created in setUp(),
 * and derives the second/third row keys used by some tests.
 */
public TestCompaction() {
  // Set cache flush size to 1MB
  conf.setInt(HConstants.HREGION_MEMSTORE_FLUSH_SIZE, 1024 * 1024);
  conf.setInt(HConstants.HREGION_MEMSTORE_BLOCK_MULTIPLIER, 100);
  // Very small scanner size cap so compactions yield control frequently.
  conf.setLong(HConstants.COMPACTION_SCANNER_SIZE_MAX, 10L);
  conf.set(CompactionThroughputControllerFactory.HBASE_THROUGHPUT_CONTROLLER_KEY,
    NoLimitThroughputController.class.getName());
  compactionThreshold = conf.getInt("hbase.hstore.compactionThreshold", 3);
  secondRowBytes = START_KEY_BYTES.clone();
  // Increment the least significant character so we get to next row.
  secondRowBytes[START_KEY_BYTES.length - 1]++;
  thirdRowBytes = START_KEY_BYTES.clone();
  thirdRowBytes[START_KEY_BYTES.length - 1] =
    (byte) (thirdRowBytes[START_KEY_BYTES.length - 1] + 2);
}
/**
 * Creates a fresh local region for each test. Two tests get special-cased configuration,
 * keyed off the test method name supplied by the {@code name} rule.
 */
@Before
public void setUp() throws Exception {
  TableDescriptorBuilder builder = UTIL.createModifyableTableDescriptor(name.getMethodName());
  if (name.getMethodName().equals("testCompactionSeqId")) {
    // Limit versions scanned per round and install DummyCompactor (see testCompactionSeqId javadoc).
    UTIL.getConfiguration().set("hbase.hstore.compaction.kv.max", "10");
    UTIL.getConfiguration().set(DefaultStoreEngine.DEFAULT_COMPACTOR_CLASS_KEY,
      DummyCompactor.class.getName());
    ColumnFamilyDescriptor familyDescriptor =
      ColumnFamilyDescriptorBuilder.newBuilder(FAMILY).setMaxVersions(65536).build();
    builder.setColumnFamily(familyDescriptor);
  }
  if (name.getMethodName().equals("testCompactionWithCorruptBlock")) {
    // GZ-compressed family plus full-read validation so the corrupted block is detected.
    UTIL.getConfiguration().setBoolean("hbase.hstore.validate.read_fully", true);
    ColumnFamilyDescriptor familyDescriptor = ColumnFamilyDescriptorBuilder.newBuilder(FAMILY)
      .setCompressionType(Compression.Algorithm.GZ).build();
    builder.setColumnFamily(familyDescriptor);
  }
  this.tableDescriptor = builder.build();
  this.r = UTIL.createLocalHRegion(tableDescriptor, null, null);
}
/**
 * Closes the test region and its WAL. The WAL close is placed in a finally block so that a
 * failing region close does not leak the WAL into subsequent tests (the original closed the
 * WAL only when r.close() succeeded).
 */
@After
public void tearDown() throws Exception {
  WAL wal = r.getWAL();
  try {
    this.r.close();
  } finally {
    wal.close();
  }
}
/**
 * Verify that you can stop a long-running compaction (used during RS shutdown) via the
 * size-based close checker.
 */
@Test
public void testInterruptCompactionBySize() throws Exception {
  assertEquals(0, count());
  // lower the polling interval for this test
  conf.setInt(SIZE_LIMIT_KEY, 10 * 1000 /* 10 KB */);
  try {
    loadAndInterruptCompaction();
  } finally {
    // don't mess up future tests
    r.writestate.writesEnabled = true;
    conf.setInt(SIZE_LIMIT_KEY, 10 * 1000 * 1000 /* 10 MB */);
    cleanUpAfterInterruptedCompaction();
  }
}

/**
 * Verify that you can stop a long-running compaction (used during RS shutdown) via the
 * time-based close checker.
 */
@Test
public void testInterruptCompactionByTime() throws Exception {
  assertEquals(0, count());
  // lower the polling interval for this test
  conf.setLong(TIME_LIMIT_KEY, 1 /* 1ms */);
  try {
    loadAndInterruptCompaction();
  } finally {
    // don't mess up future tests
    r.writestate.writesEnabled = true;
    conf.setLong(TIME_LIMIT_KEY, 10 * 1000L /* 10 s */);
    cleanUpAfterInterruptedCompaction();
  }
}

/**
 * Shared body of the interrupt tests: creates {@code compactionThreshold} store files (~15KB
 * total), disables writes right as compaction prep starts, triggers a compaction, and asserts
 * the compaction aborted — original files intact, nothing persisted past compactStores(), and
 * only an empty store dir left in the region tmp dir.
 */
private void loadAndInterruptCompaction() throws Exception {
  // Create a couple store files w/ 15KB (over 10KB interval)
  int jmax = (int) Math.ceil(15.0 / compactionThreshold);
  byte[] pad = new byte[1000]; // 1 KB chunk
  for (int i = 0; i < compactionThreshold; i++) {
    Table loader = new RegionAsTable(r);
    Put p = new Put(Bytes.add(STARTROW, Bytes.toBytes(i)));
    p.setDurability(Durability.SKIP_WAL);
    for (int j = 0; j < jmax; j++) {
      p.addColumn(COLUMN_FAMILY, Bytes.toBytes(j), pad);
    }
    HTestConst.addContent(loader, Bytes.toString(COLUMN_FAMILY));
    loader.put(p);
    r.flush(true);
  }
  HRegion spyR = spy(r);
  doAnswer(new Answer<Object>() {
    @Override
    public Object answer(InvocationOnMock invocation) throws Throwable {
      // Simulate an RS shutdown arriving while the compaction is being prepared.
      r.writestate.writesEnabled = false;
      return invocation.callRealMethod();
    }
  }).when(spyR).doRegionCompactionPrep();
  // force a minor compaction, but not before requesting a stop
  spyR.compactStores();
  // ensure that the compaction stopped, all old files are intact,
  HStore s = r.getStore(COLUMN_FAMILY);
  assertEquals(compactionThreshold, s.getStorefilesCount());
  assertTrue(s.getStorefilesSize() > 15 * 1000);
  // and no new store files persisted past compactStores()
  // only one empty dir exists in temp dir
  FileStatus[] ls = r.getFilesystem().listStatus(r.getRegionFileSystem().getTempDir());
  assertEquals(1, ls.length);
  Path storeTempDir =
    new Path(r.getRegionFileSystem().getTempDir(), Bytes.toString(COLUMN_FAMILY));
  assertTrue(r.getFilesystem().exists(storeTempDir));
  ls = r.getFilesystem().listStatus(storeTempDir);
  assertEquals(0, ls.length);
}

/**
 * Shared cleanup of the interrupt tests: deletes all rows written by
 * {@link #loadAndInterruptCompaction()}, expires the remaining versions via a lowered TTL plus
 * a major compaction, and asserts the store is empty again.
 */
private void cleanUpAfterInterruptedCompaction() throws Exception {
  // Delete all Store information once done using
  for (int i = 0; i < compactionThreshold; i++) {
    Delete delete = new Delete(Bytes.add(STARTROW, Bytes.toBytes(i)));
    byte[][] famAndQf = { COLUMN_FAMILY, null };
    delete.addFamily(famAndQf[0]);
    r.delete(delete);
  }
  r.flush(true);
  // Multiple versions allowed for an entry, so the delete isn't enough
  // Lower TTL and expire to ensure that all our entries have been wiped
  final int ttl = 1000;
  for (HStore store : this.r.stores.values()) {
    ScanInfo old = store.getScanInfo();
    ScanInfo si = old.customize(old.getMaxVersions(), ttl, old.getKeepDeletedCells());
    store.setScanInfo(si);
  }
  Thread.sleep(ttl);
  r.compact(true);
  assertEquals(0, count());
}
/**
 * Counts the cells across every store file of the default column family by opening a pread
 * scanner on each file and walking it from the lowest possible key.
 */
private int count() throws IOException {
  int total = 0;
  for (HStoreFile sf : this.r.stores.get(COLUMN_FAMILY_TEXT).getStorefiles()) {
    sf.initReader();
    try (StoreFileScanner scanner = sf.getPreadScanner(false, Long.MAX_VALUE, 0, false)) {
      scanner.seek(KeyValue.LOWESTKEY);
      boolean more = scanner.next() != null;
      while (more) {
        total++;
        more = scanner.next() != null;
      }
    }
  }
  return total;
}
/** Loads standard test content into the default column family and flushes one store file. */
private void createStoreFile(final HRegion region) throws IOException {
  createStoreFile(region, Bytes.toString(COLUMN_FAMILY));
}
/** Loads standard test content into the given family and flushes it to a new store file. */
private void createStoreFile(final HRegion region, String family) throws IOException {
  Table loader = new RegionAsTable(region);
  HTestConst.addContent(loader, family);
  region.flush(true);
}
/**
 * Compacts ten store files, then overwrites a file in the store's tmp directory with garbage
 * and verifies that completing the compaction fails with IOException while the corrupt file is
 * left in place in tmp.
 */
@Test
public void testCompactionWithCorruptResult() throws Exception {
  int nfiles = 10;
  for (int i = 0; i < nfiles; i++) {
    createStoreFile(r);
  }
  HStore store = r.getStore(COLUMN_FAMILY);
  Collection<HStoreFile> storeFiles = store.getStorefiles();
  DefaultCompactor tool = (DefaultCompactor) store.storeEngine.getCompactor();
  CompactionRequestImpl request = new CompactionRequestImpl(storeFiles);
  tool.compact(request, NoLimitThroughputController.INSTANCE, null);
  // Now lets corrupt the compacted file.
  FileSystem fs = store.getFileSystem();
  // default compaction policy created one and only one new compacted file
  Path tmpPath = store.getRegionFileSystem().createTempName();
  try (FSDataOutputStream stream = fs.create(tmpPath, null, true, 512, (short) 3, 1024L, null)) {
    stream.writeChars("CORRUPT FILE!!!!");
  }
  // The complete compaction should fail and the corrupt file should remain
  // in the 'tmp' directory;
  assertThrows(IOException.class, () -> store.doCompaction(null, null, null,
    EnvironmentEdgeManager.currentTime(), Collections.singletonList(tmpPath)));
  assertTrue(fs.exists(tmpPath));
}
/**
 * This test uses a hand-modified HFile, which is loaded in from the resources' path. That file
 * was generated from the test support code in this class and then edited to corrupt the
 * GZ-encoded block by zeroing-out the first two bytes of the GZip header, the "standard
 * declaration" of {@code 1f 8b}, found at offset 33 in the file. I'm not sure why, but it seems
 * that in this test context we do not enforce CRC checksums. Thus, this corruption manifests in
 * the Decompressor rather than in the reader when it loads the block bytes and compares vs. the
 * header.
 */
@Test
public void testCompactionWithCorruptBlock() throws Exception {
  createStoreFile(r, Bytes.toString(FAMILY));
  createStoreFile(r, Bytes.toString(FAMILY));
  HStore store = r.getStore(FAMILY);
  Collection<HStoreFile> storeFiles = store.getStorefiles();
  DefaultCompactor tool = (DefaultCompactor) store.storeEngine.getCompactor();
  CompactionRequestImpl request = new CompactionRequestImpl(storeFiles);
  tool.compact(request, NoLimitThroughputController.INSTANCE, null);
  // insert the hfile with a corrupted data block into the region's tmp directory, where
  // compaction output is collected.
  FileSystem fs = store.getFileSystem();
  Path tmpPath = store.getRegionFileSystem().createTempName();
  try (
    InputStream inputStream =
      getClass().getResourceAsStream("TestCompaction_HFileWithCorruptBlock.gz");
    GZIPInputStream gzipInputStream = new GZIPInputStream(Objects.requireNonNull(inputStream));
    OutputStream outputStream = fs.create(tmpPath, null, true, 512, (short) 3, 1024L, null)) {
    assertThat(gzipInputStream, notNullValue());
    assertThat(outputStream, notNullValue());
    IOUtils.copyBytes(gzipInputStream, outputStream, 512);
  }
  LoggerFactory.getLogger(TestCompaction.class).info("Wrote corrupted HFile to {}", tmpPath);
  // The complete compaction should fail and the corrupt file should remain in the 'tmp'
  // directory. assertThrows replaces the original try/catch + fail() idiom (already used
  // elsewhere in this class).
  IOException e = assertThrows(IOException.class,
    () -> store.doCompaction(request, storeFiles, null, EnvironmentEdgeManager.currentTime(),
      Collections.singletonList(tmpPath)));
  // Walk to the root cause: the corruption surfaces in the gzip Decompressor.
  Throwable rootCause = e;
  while (rootCause.getCause() != null) {
    rootCause = rootCause.getCause();
  }
  assertThat(rootCause, allOf(instanceOf(IOException.class),
    hasProperty("message", containsString("not a gzip file"))));
  assertTrue(fs.exists(tmpPath));
}
/**
 * Create a custom compaction request and be sure that we can track it through the queue, knowing
 * when the compaction is completed.
 */
@Test
public void testTrackingCompactionRequest() throws Exception {
  // setup a compact/split thread on a mock server
  HRegionServer mockServer = Mockito.mock(HRegionServer.class);
  Mockito.when(mockServer.getConfiguration()).thenReturn(r.getBaseConf());
  CompactSplit thread = new CompactSplit(mockServer);
  Mockito.when(mockServer.getCompactSplitThread()).thenReturn(thread);
  // setup a region/store with some files
  HStore store = r.getStore(COLUMN_FAMILY);
  createStoreFile(r);
  for (int i = 0; i < MAX_FILES_TO_COMPACT + 1; i++) {
    createStoreFile(r);
  }
  // Tracker counts its latch down when the compaction completes.
  CountDownLatch latch = new CountDownLatch(1);
  Tracker tracker = new Tracker(latch);
  thread.requestCompaction(r, store, "test custom comapction", PRIORITY_USER, tracker, null);
  // wait for the latch to complete.
  latch.await();
  thread.interruptIfNecessary();
}
/**
 * Verifies that a compaction whose post-compaction split check throws is counted as both
 * completed and failed in the region metrics.
 */
@Test
public void testCompactionFailure() throws Exception {
  // setup a compact/split thread on a mock server
  HRegionServer mockServer = Mockito.mock(HRegionServer.class);
  Mockito.when(mockServer.getConfiguration()).thenReturn(r.getBaseConf());
  CompactSplit thread = new CompactSplit(mockServer);
  Mockito.when(mockServer.getCompactSplitThread()).thenReturn(thread);
  // setup a region/store with some files
  HStore store = r.getStore(COLUMN_FAMILY);
  createStoreFile(r);
  for (int i = 0; i < HStore.DEFAULT_BLOCKING_STOREFILE_COUNT - 1; i++) {
    createStoreFile(r);
  }
  // Make the split check (run after a successful compaction) blow up.
  HRegion mockRegion = Mockito.spy(r);
  Mockito.when(mockRegion.checkSplit())
    .thenThrow(new RuntimeException("Thrown intentionally by test!"));
  try (MetricsRegionWrapperImpl metricsWrapper = new MetricsRegionWrapperImpl(r)) {
    long preCompletedCount = metricsWrapper.getNumCompactionsCompleted();
    long preFailedCount = metricsWrapper.getNumCompactionsFailed();
    CountDownLatch latch = new CountDownLatch(1);
    Tracker tracker = new Tracker(latch);
    thread.requestCompaction(mockRegion, store, "test custom comapction", PRIORITY_USER, tracker,
      null);
    // wait for the latch to complete.
    latch.await(120, TimeUnit.SECONDS);
    // compaction should have completed and been marked as failed due to error in split request
    long postCompletedCount = metricsWrapper.getNumCompactionsCompleted();
    long postFailedCount = metricsWrapper.getNumCompactionsFailed();
    assertTrue("Completed count should have increased (pre=" + preCompletedCount + ", post="
      + postCompletedCount + ")", postCompletedCount > preCompletedCount);
    assertTrue("Failed count should have increased (pre=" + preFailedCount + ", post="
      + postFailedCount + ")", postFailedCount > preFailedCount);
  }
}
/**
 * Test no new Compaction requests are generated after calling stop compactions
 */
@Test
public void testStopStartCompaction() throws IOException {
  // setup a compact/split thread on a mock server
  HRegionServer mockServer = Mockito.mock(HRegionServer.class);
  Mockito.when(mockServer.getConfiguration()).thenReturn(r.getBaseConf());
  final CompactSplit thread = new CompactSplit(mockServer);
  Mockito.when(mockServer.getCompactSplitThread()).thenReturn(thread);
  // setup a region/store with some files
  HStore store = r.getStore(COLUMN_FAMILY);
  createStoreFile(r);
  for (int i = 0; i < HStore.DEFAULT_BLOCKING_STOREFILE_COUNT - 1; i++) {
    createStoreFile(r);
  }
  // With compactions switched off, a request must not start any compaction work.
  thread.switchCompaction(false);
  thread.requestCompaction(r, store, "test", Store.PRIORITY_USER,
    CompactionLifeCycleTracker.DUMMY, null);
  assertFalse(thread.isCompactionsEnabled());
  int longCompactions = thread.getLongCompactions().getActiveCount();
  int shortCompactions = thread.getShortCompactions().getActiveCount();
  assertEquals(
    "longCompactions=" + longCompactions + "," + "shortCompactions=" + shortCompactions, 0,
    longCompactions + shortCompactions);
  thread.switchCompaction(true);
  assertTrue(thread.isCompactionsEnabled());
  // Make sure no compactions have run.
  assertEquals(0, thread.getLongCompactions().getCompletedTaskCount()
    + thread.getShortCompactions().getCompletedTaskCount());
  // Request a compaction and make sure it is submitted successfully.
  thread.requestCompaction(r, store, "test", Store.PRIORITY_USER,
    CompactionLifeCycleTracker.DUMMY, null);
  // Wait until the compaction finishes.
  Waiter.waitFor(UTIL.getConfiguration(), 5000,
    (Waiter.Predicate<Exception>) () -> thread.getLongCompactions().getCompletedTaskCount()
      + thread.getShortCompactions().getCompletedTaskCount() == 1);
  // Make sure there are no compactions running.
  assertEquals(0,
    thread.getLongCompactions().getActiveCount() + thread.getShortCompactions().getActiveCount());
}
/**
 * Verifies that switching compactions off aborts an in-flight compaction: after the switch,
 * the store-file count is unchanged. WaitThroughPutController is installed to keep the
 * compaction slow enough to still be running when it is switched off.
 */
@Test
public void testInterruptingRunningCompactions() throws Exception {
  // setup a compact/split thread on a mock server
  conf.set(CompactionThroughputControllerFactory.HBASE_THROUGHPUT_CONTROLLER_KEY,
    WaitThroughPutController.class.getName());
  HRegionServer mockServer = Mockito.mock(HRegionServer.class);
  Mockito.when(mockServer.getConfiguration()).thenReturn(r.getBaseConf());
  CompactSplit thread = new CompactSplit(mockServer);
  Mockito.when(mockServer.getCompactSplitThread()).thenReturn(thread);
  // setup a region/store with some files
  HStore store = r.getStore(COLUMN_FAMILY);
  int jmax = (int) Math.ceil(15.0 / compactionThreshold);
  byte[] pad = new byte[1000]; // 1 KB chunk
  for (int i = 0; i < compactionThreshold; i++) {
    Table loader = new RegionAsTable(r);
    Put p = new Put(Bytes.add(STARTROW, Bytes.toBytes(i)));
    p.setDurability(Durability.SKIP_WAL);
    for (int j = 0; j < jmax; j++) {
      p.addColumn(COLUMN_FAMILY, Bytes.toBytes(j), pad);
    }
    HTestConst.addContent(loader, Bytes.toString(COLUMN_FAMILY));
    loader.put(p);
    r.flush(true);
  }
  // Original fetched the same store a second time into a separate variable; one lookup suffices.
  int initialFiles = store.getStorefilesCount();
  thread.requestCompaction(r, store, "test custom comapction", PRIORITY_USER,
    CompactionLifeCycleTracker.DUMMY, null);
  // Give the compaction time to start before switching it off.
  Thread.sleep(3000);
  thread.switchCompaction(false);
  // The aborted compaction must not have replaced any store files.
  assertEquals(initialFiles, store.getStorefilesCount());
  // don't mess up future tests
  thread.switchCompaction(true);
}
/**
 * HBASE-7947: Regression test to ensure adding to the correct list in the {@link CompactSplit}
 * @throws Exception on failure
 */
@Test
public void testMultipleCustomCompactionRequests() throws Exception {
  // setup a compact/split thread on a mock server
  HRegionServer mockServer = Mockito.mock(HRegionServer.class);
  Mockito.when(mockServer.getConfiguration()).thenReturn(r.getBaseConf());
  CompactSplit thread = new CompactSplit(mockServer);
  Mockito.when(mockServer.getCompactSplitThread()).thenReturn(thread);
  // setup a region/store with some files
  int numStores = r.getStores().size();
  // One latch count per store; the shared tracker counts down once per finished compaction.
  CountDownLatch latch = new CountDownLatch(numStores);
  Tracker tracker = new Tracker(latch);
  // create some store files and setup requests for each store on which we want to do a
  // compaction
  for (HStore store : r.getStores()) {
    createStoreFile(r, store.getColumnFamilyName());
    createStoreFile(r, store.getColumnFamilyName());
    createStoreFile(r, store.getColumnFamilyName());
    thread.requestCompaction(r, store, "test mulitple custom comapctions", PRIORITY_USER, tracker,
      null);
  }
  // wait for the latch to complete.
  latch.await();
  thread.interruptIfNecessary();
}
/**
 * Mock store factory whose compactions "complete" instantly: selecting a compaction moves all
 * not-compacting files into the compacting set, and compact() records the number of files
 * handled into the shared results list.
 */
class StoreMockMaker extends StatefulStoreMockMaker {
  public ArrayList<HStoreFile> compacting = new ArrayList<>();
  public ArrayList<HStoreFile> notCompacting = new ArrayList<>();
  // Shared across makers in testCompactionQueuePriorities; guarded by synchronizing on itself.
  private final ArrayList<Integer> results;

  public StoreMockMaker(ArrayList<Integer> results) {
    this.results = results;
  }

  public class TestCompactionContext extends CompactionContext {
    private List<HStoreFile> selectedFiles;

    public TestCompactionContext(List<HStoreFile> selectedFiles) {
      super();
      this.selectedFiles = selectedFiles;
    }

    @Override
    public List<HStoreFile> preSelect(List<HStoreFile> filesCompacting) {
      return new ArrayList<>();
    }

    @Override
    public boolean select(List<HStoreFile> filesCompacting, boolean isUserCompaction,
      boolean mayUseOffPeak, boolean forceMajor) throws IOException {
      this.request = new CompactionRequestImpl(selectedFiles);
      this.request.setPriority(getPriority());
      return true;
    }

    @Override
    public List<Path> compact(ThroughputController throughputController, User user)
      throws IOException {
      // "Compaction" is instantaneous: just record how many files were selected.
      finishCompaction(this.selectedFiles);
      return new ArrayList<>();
    }
  }

  @Override
  public synchronized Optional<CompactionContext> selectCompaction() {
    // Selection claims every currently not-compacting file.
    CompactionContext ctx = new TestCompactionContext(new ArrayList<>(notCompacting));
    compacting.addAll(notCompacting);
    notCompacting.clear();
    try {
      ctx.select(null, false, false, false);
    } catch (IOException ex) {
      fail("Shouldn't happen");
    }
    return Optional.of(ctx);
  }

  @Override
  public synchronized void cancelCompaction(Object object) {
    // Return the cancelled context's files to the not-compacting pool.
    TestCompactionContext ctx = (TestCompactionContext) object;
    compacting.removeAll(ctx.selectedFiles);
    notCompacting.addAll(ctx.selectedFiles);
  }

  public synchronized void finishCompaction(List<HStoreFile> sfs) {
    if (sfs.isEmpty()) return;
    synchronized (results) {
      results.add(sfs.size());
    }
    compacting.removeAll(sfs);
  }

  @Override
  public int getPriority() {
    // Priority value drops as files accumulate (both pools counted).
    return 7 - compacting.size() - notCompacting.size();
  }
}
/**
 * Mock store factory whose compaction blocks (wait/notify) inside compact() until the test
 * calls unblock(), letting tests hold the compaction thread while mucking with the queue.
 */
public class BlockingStoreMockMaker extends StatefulStoreMockMaker {
  BlockingCompactionContext blocked = null; // most recently selected (still blocked) context

  public class BlockingCompactionContext extends CompactionContext {
    public volatile boolean isInCompact = false; // set once compact() has entered its wait

    /** Releases the compaction thread parked in compact(). */
    public void unblock() {
      synchronized (this) {
        this.notifyAll();
      }
    }

    @Override
    public List<Path> compact(ThroughputController throughputController, User user)
      throws IOException {
      try {
        isInCompact = true;
        // Park here until unblock() notifies.
        synchronized (this) {
          this.wait();
        }
      } catch (InterruptedException e) {
        Assume.assumeNoException(e);
      }
      return new ArrayList<>();
    }

    @Override
    public List<HStoreFile> preSelect(List<HStoreFile> filesCompacting) {
      return new ArrayList<>();
    }

    @Override
    public boolean select(List<HStoreFile> f, boolean i, boolean m, boolean e)
      throws IOException {
      this.request = new CompactionRequestImpl(new ArrayList<>());
      return true;
    }
  }

  @Override
  public Optional<CompactionContext> selectCompaction() {
    this.blocked = new BlockingCompactionContext();
    try {
      this.blocked.select(null, false, false, false);
    } catch (IOException ex) {
      fail("Shouldn't happen");
    }
    return Optional.of(blocked);
  }

  @Override
  public void cancelCompaction(Object object) {
  }

  @Override
  public int getPriority() {
    return Integer.MIN_VALUE; // some invalid value, see createStoreMock
  }

  /** Spins until the compaction thread has entered compact(), then hands back the context. */
  public BlockingCompactionContext waitForBlocking() {
    while (this.blocked == null || !this.blocked.isInCompact) {
      Threads.sleepWithoutInterrupt(50);
    }
    BlockingCompactionContext ctx = this.blocked;
    this.blocked = null;
    return ctx;
  }

  @Override
  public HStore createStoreMock(String name) throws Exception {
    return createStoreMock(Integer.MIN_VALUE, name);
  }

  public HStore createStoreMock(int priority, String name) throws Exception {
    // Override the mock to always return the specified priority.
    HStore s = super.createStoreMock(name);
    when(s.getCompactPriority()).thenReturn(priority);
    return s;
  }
}
/**
 * Test compaction priority management and multiple compactions per store (HBASE-8665).
 * A blocking mock compaction holds the single (short) compaction thread while further
 * requests are queued; unblocking then observes the order in which queued compactions run.
 */
@Test
public void testCompactionQueuePriorities() throws Exception {
  // Setup a compact/split thread on a mock server.
  final Configuration conf = HBaseConfiguration.create();
  HRegionServer mockServer = mock(HRegionServer.class);
  when(mockServer.isStopped()).thenReturn(false);
  when(mockServer.getConfiguration()).thenReturn(conf);
  when(mockServer.getChoreService()).thenReturn(new ChoreService("test"));
  CompactSplit cst = new CompactSplit(mockServer);
  when(mockServer.getCompactSplitThread()).thenReturn(cst);
  // prevent large compaction thread pool stealing job from small compaction queue.
  cst.shutdownLongCompactions();
  // Set up the region mock that redirects compactions.
  HRegion r = mock(HRegion.class);
  when(r.compact(any(), any(), any(), any())).then(new Answer<Boolean>() {
    @Override
    public Boolean answer(InvocationOnMock invocation) throws Throwable {
      // Run the mock context's compact() directly instead of real region compaction.
      invocation.<CompactionContext> getArgument(0).compact(invocation.getArgument(2), null);
      return true;
    }
  });
  // Set up store mocks for 2 "real" stores and the one we use for blocking CST.
  ArrayList<Integer> results = new ArrayList<>();
  StoreMockMaker sm = new StoreMockMaker(results), sm2 = new StoreMockMaker(results);
  HStore store = sm.createStoreMock("store1");
  HStore store2 = sm2.createStoreMock("store2");
  BlockingStoreMockMaker blocker = new BlockingStoreMockMaker();
  // First, block the compaction thread so that we could muck with queue.
  cst.requestSystemCompaction(r, blocker.createStoreMock(1, "b-pri1"), "b-pri1");
  BlockingStoreMockMaker.BlockingCompactionContext currentBlock = blocker.waitForBlocking();
  // Add 4 files to store1, 3 to store2, and queue compactions; pri 3 and 4 respectively.
  for (int i = 0; i < 4; ++i) {
    sm.notCompacting.add(createFile());
  }
  cst.requestSystemCompaction(r, store, "s1-pri3");
  for (int i = 0; i < 3; ++i) {
    sm2.notCompacting.add(createFile());
  }
  cst.requestSystemCompaction(r, store2, "s2-pri4");
  // Now add 2 more files to store1 and queue compaction - pri 1.
  for (int i = 0; i < 2; ++i) {
    sm.notCompacting.add(createFile());
  }
  cst.requestSystemCompaction(r, store, "s1-pri1");
  // Finally add blocking compaction with priority 2.
  cst.requestSystemCompaction(r, blocker.createStoreMock(2, "b-pri2"), "b-pri2");
  // Unblock the blocking compaction; we should run pri1 and become block again in pri2.
  currentBlock.unblock();
  currentBlock = blocker.waitForBlocking();
  // Pri1 should have "compacted" all 6 files.
  assertEquals(1, results.size());
  assertEquals(6, results.get(0).intValue());
  // Add 2 files to store 1 (it has 2 files now).
  for (int i = 0; i < 2; ++i) {
    sm.notCompacting.add(createFile());
  }
  // Now we have pri4 for store 2 in queue, and pri3 for store1; store1's current priority
  // is 5, however, so it must not preempt store 2. Add blocking compaction at the end.
  cst.requestSystemCompaction(r, blocker.createStoreMock(7, "b-pri7"), "b-pri7");
  currentBlock.unblock();
  currentBlock = blocker.waitForBlocking();
  assertEquals(3, results.size());
  assertEquals(3, results.get(1).intValue()); // 3 files should go before 2 files.
  assertEquals(2, results.get(2).intValue());
  currentBlock.unblock();
  cst.interruptIfNecessary();
}
/**
 * Regression test for HBASE-16931: a scan out-of-order assertion during compaction.
 * <p/>
 * Writes 10 versions of one qualifier (ascending timestamps) and flushes to hfile1, then writes
 * 10 more versions of the same qualifier and flushes to hfile2. The newest cell in hfile1 and the
 * oldest cell in hfile2 share a timestamp but differ in sequence id, and are scanned back to back
 * during compaction. With compaction.kv.max=10 the compactor scans 10 versions per round, and with
 * keepSeqIdPeriod=0 in {@link DummyCompactor} hfile2's cells are rewritten with seqId cleared
 * (set to 0) — including the boundary cell — which triggered the out-of-order assertion before
 * the HBASE-16931 fix.
 */
@Test
public void testCompactionSeqId() throws Exception {
  final byte[] rowKey = Bytes.toBytes("row");
  final byte[] qualifier = Bytes.toBytes("qualifier");
  final long baseTs = 10000;
  // First flush (hfile1): v0..v9 at ascending timestamps, e.g.
  //   row1/cf:a/10009/Put/seqid=11 V: v9  ...  row1/cf:a/10000/Put/seqid=2 V: v0
  for (int version = 0; version <= 9; version++) {
    Put put = new Put(rowKey);
    put.addColumn(FAMILY, qualifier, baseTs + version, Bytes.toBytes("v" + version));
    r.put(put);
  }
  r.flush(true);
  // Second flush (hfile2): v18 down to v9 at descending timestamps, e.g.
  //   row1/cf:a/10018/Put/seqid=16 V: v18  ...  row1/cf:a/10009/Put/seqid=25 V: v9
  // Note v9 here has the same timestamp as hfile1's v9 but a larger seqid.
  for (int version = 18; version >= 9; version--) {
    Put put = new Put(rowKey);
    put.addColumn(FAMILY, qualifier, baseTs + version, Bytes.toBytes("v" + version));
    r.put(put);
  }
  r.flush(true);
  // Major compaction scans the boundary cells successively; must not assert.
  r.compact(true);
}
/**
 * Compactor that clears sequence ids on every rewritten cell (keepSeqIdPeriod = 0),
 * used by testCompactionSeqId to reproduce the HBASE-16931 scan-order assertion.
 */
public static class DummyCompactor extends DefaultCompactor {
  public DummyCompactor(Configuration conf, HStore store) {
    super(conf, store);
    // 0 means "keep no recent seqIds": all cells are written out with seqId set to 0.
    this.keepSeqIdPeriod = 0;
  }
}
/** Builds a mock store file whose reader reports a fixed length of 10 bytes. */
private static HStoreFile createFile() throws Exception {
  StoreFileReader reader = mock(StoreFileReader.class);
  when(reader.length()).thenReturn(10L);
  HStoreFile storeFile = mock(HStoreFile.class);
  when(storeFile.getPath()).thenReturn(new Path("file"));
  when(storeFile.getReader()).thenReturn(reader);
  return storeFile;
}
/**
 * {@link CompactionLifeCycleTracker} that counts down the supplied latch after each tracked
 * compaction executes, so a test can block until the requested compaction finishes.
 */
public static class Tracker implements CompactionLifeCycleTracker {

  private final CountDownLatch latch;

  public Tracker(CountDownLatch done) {
    this.latch = done;
  }

  @Override
  public void afterExecution(Store store) {
    latch.countDown();
  }
}
/**
 * Throughput controller whose {@code control()} sleeps for a very long time, effectively
 * stalling any compaction that consults it until the test interrupts or shuts it down.
 * (The previous javadoc was copy-pasted from {@link Tracker} and incorrectly described a
 * CompactionLifeCycleTracker.)
 */
public static class WaitThroughPutController extends NoLimitThroughputController {

  public WaitThroughPutController() {
  }

  @Override
  public long control(String compactionName, long size) throws InterruptedException {
    // ~100 minutes: callers are expected to interrupt well before this elapses.
    Thread.sleep(6000000);
    return 6000000;
  }
}
}
|
apache/hive | 37,781 | standalone-metastore/metastore-common/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/CommitTxnRequest.java | /**
* Autogenerated by Thrift Compiler (0.16.0)
*
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
* @generated
*/
package org.apache.hadoop.hive.metastore.api;
@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked", "unused"})
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.16.0)")
@org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public class CommitTxnRequest implements org.apache.thrift.TBase<CommitTxnRequest, CommitTxnRequest._Fields>, java.io.Serializable, Cloneable, Comparable<CommitTxnRequest> {
// Wire-level descriptors: one TStruct for the message plus a TField (name, wire type,
// field id) per declared field. Ids/types must stay in sync with the Thrift IDL.
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("CommitTxnRequest");
private static final org.apache.thrift.protocol.TField TXNID_FIELD_DESC = new org.apache.thrift.protocol.TField("txnid", org.apache.thrift.protocol.TType.I64, (short)1);
private static final org.apache.thrift.protocol.TField REPL_POLICY_FIELD_DESC = new org.apache.thrift.protocol.TField("replPolicy", org.apache.thrift.protocol.TType.STRING, (short)2);
private static final org.apache.thrift.protocol.TField WRITE_EVENT_INFOS_FIELD_DESC = new org.apache.thrift.protocol.TField("writeEventInfos", org.apache.thrift.protocol.TType.LIST, (short)3);
private static final org.apache.thrift.protocol.TField REPL_LAST_ID_INFO_FIELD_DESC = new org.apache.thrift.protocol.TField("replLastIdInfo", org.apache.thrift.protocol.TType.STRUCT, (short)4);
private static final org.apache.thrift.protocol.TField KEY_VALUE_FIELD_DESC = new org.apache.thrift.protocol.TField("keyValue", org.apache.thrift.protocol.TType.STRUCT, (short)5);
private static final org.apache.thrift.protocol.TField EXCL_WRITE_ENABLED_FIELD_DESC = new org.apache.thrift.protocol.TField("exclWriteEnabled", org.apache.thrift.protocol.TType.BOOL, (short)6);
private static final org.apache.thrift.protocol.TField TXN_TYPE_FIELD_DESC = new org.apache.thrift.protocol.TField("txn_type", org.apache.thrift.protocol.TType.I32, (short)7);

// Serialization strategies: field-by-field (standard) vs bitset-prefixed (tuple).
private static final org.apache.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY = new CommitTxnRequestStandardSchemeFactory();
private static final org.apache.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new CommitTxnRequestTupleSchemeFactory();

// Payload. txnid is the only required field; primitives track presence via __isset_bitfield,
// object fields via null-ness.
private long txnid; // required
private @org.apache.thrift.annotation.Nullable java.lang.String replPolicy; // optional
private @org.apache.thrift.annotation.Nullable java.util.List<WriteEventInfo> writeEventInfos; // optional
private @org.apache.thrift.annotation.Nullable ReplLastIdInfo replLastIdInfo; // optional
private @org.apache.thrift.annotation.Nullable CommitTxnKeyValue keyValue; // optional
private boolean exclWriteEnabled; // optional
private @org.apache.thrift.annotation.Nullable TxnType txn_type; // optional
/** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
public enum _Fields implements org.apache.thrift.TFieldIdEnum {
  TXNID((short)1, "txnid"),
  REPL_POLICY((short)2, "replPolicy"),
  WRITE_EVENT_INFOS((short)3, "writeEventInfos"),
  REPL_LAST_ID_INFO((short)4, "replLastIdInfo"),
  KEY_VALUE((short)5, "keyValue"),
  EXCL_WRITE_ENABLED((short)6, "exclWriteEnabled"),
  /**
   * @see TxnType
   */
  TXN_TYPE((short)7, "txn_type");

  // Lookup table from Thrift field name to enum constant.
  private static final java.util.Map<java.lang.String, _Fields> byName = new java.util.HashMap<java.lang.String, _Fields>();

  static {
    for (_Fields field : java.util.EnumSet.allOf(_Fields.class)) {
      byName.put(field.getFieldName(), field);
    }
  }

  /** Find the _Fields constant that matches fieldId, or null if its not found. */
  @org.apache.thrift.annotation.Nullable
  public static _Fields findByThriftId(int fieldId) {
    switch (fieldId) {
      case 1: return TXNID;
      case 2: return REPL_POLICY;
      case 3: return WRITE_EVENT_INFOS;
      case 4: return REPL_LAST_ID_INFO;
      case 5: return KEY_VALUE;
      case 6: return EXCL_WRITE_ENABLED;
      case 7: return TXN_TYPE;
      default: return null;
    }
  }

  /** Find the _Fields constant that matches fieldId, throwing an exception if it is not found. */
  public static _Fields findByThriftIdOrThrow(int fieldId) {
    _Fields fields = findByThriftId(fieldId);
    if (fields == null) {
      throw new java.lang.IllegalArgumentException("Field " + fieldId + " doesn't exist!");
    }
    return fields;
  }

  /** Find the _Fields constant that matches name, or null if its not found. */
  @org.apache.thrift.annotation.Nullable
  public static _Fields findByName(java.lang.String name) {
    return byName.get(name);
  }

  private final short _thriftId;
  private final java.lang.String _fieldName;

  _Fields(short thriftId, java.lang.String fieldName) {
    _thriftId = thriftId;
    _fieldName = fieldName;
  }

  public short getThriftFieldId() {
    return _thriftId;
  }

  public java.lang.String getFieldName() {
    return _fieldName;
  }
}
// isset id assignments
// Bit positions inside __isset_bitfield tracking presence of the two primitive fields.
private static final int __TXNID_ISSET_ID = 0;
private static final int __EXCLWRITEENABLED_ISSET_ID = 1;
private byte __isset_bitfield = 0;
// Optional fields, in id order; used by the tuple scheme's presence bitset.
private static final _Fields optionals[] = {_Fields.REPL_POLICY,_Fields.WRITE_EVENT_INFOS,_Fields.REPL_LAST_ID_INFO,_Fields.KEY_VALUE,_Fields.EXCL_WRITE_ENABLED,_Fields.TXN_TYPE};
// Immutable field-metadata map, registered globally in the static initializer below.
public static final java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
// Builds the per-field metadata (requirement level + value type) and registers it
// with the global Thrift metadata registry.
static {
  java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new java.util.EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
  tmpMap.put(_Fields.TXNID, new org.apache.thrift.meta_data.FieldMetaData("txnid", org.apache.thrift.TFieldRequirementType.REQUIRED,
      new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I64)));
  tmpMap.put(_Fields.REPL_POLICY, new org.apache.thrift.meta_data.FieldMetaData("replPolicy", org.apache.thrift.TFieldRequirementType.OPTIONAL,
      new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
  tmpMap.put(_Fields.WRITE_EVENT_INFOS, new org.apache.thrift.meta_data.FieldMetaData("writeEventInfos", org.apache.thrift.TFieldRequirementType.OPTIONAL,
      new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST,
          new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, WriteEventInfo.class))));
  tmpMap.put(_Fields.REPL_LAST_ID_INFO, new org.apache.thrift.meta_data.FieldMetaData("replLastIdInfo", org.apache.thrift.TFieldRequirementType.OPTIONAL,
      new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, ReplLastIdInfo.class)));
  tmpMap.put(_Fields.KEY_VALUE, new org.apache.thrift.meta_data.FieldMetaData("keyValue", org.apache.thrift.TFieldRequirementType.OPTIONAL,
      new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, CommitTxnKeyValue.class)));
  tmpMap.put(_Fields.EXCL_WRITE_ENABLED, new org.apache.thrift.meta_data.FieldMetaData("exclWriteEnabled", org.apache.thrift.TFieldRequirementType.OPTIONAL,
      new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.BOOL)));
  tmpMap.put(_Fields.TXN_TYPE, new org.apache.thrift.meta_data.FieldMetaData("txn_type", org.apache.thrift.TFieldRequirementType.OPTIONAL,
      new org.apache.thrift.meta_data.EnumMetaData(org.apache.thrift.protocol.TType.ENUM, TxnType.class)));
  metaDataMap = java.util.Collections.unmodifiableMap(tmpMap);
  org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(CommitTxnRequest.class, metaDataMap);
}
public CommitTxnRequest() {
  // exclWriteEnabled defaults to true (IDL default); its isset bit stays clear.
  this.exclWriteEnabled = true;
}

public CommitTxnRequest(
  long txnid)
{
  this();
  this.txnid = txnid;
  setTxnidIsSet(true);
}

/**
 * Performs a deep copy on <i>other</i>.
 */
public CommitTxnRequest(CommitTxnRequest other) {
  __isset_bitfield = other.__isset_bitfield;
  this.txnid = other.txnid;
  if (other.isSetReplPolicy()) {
    // Strings are immutable; sharing the reference is a deep copy.
    this.replPolicy = other.replPolicy;
  }
  if (other.isSetWriteEventInfos()) {
    java.util.List<WriteEventInfo> copied = new java.util.ArrayList<WriteEventInfo>(other.writeEventInfos.size());
    for (WriteEventInfo src : other.writeEventInfos) {
      copied.add(new WriteEventInfo(src));
    }
    this.writeEventInfos = copied;
  }
  if (other.isSetReplLastIdInfo()) {
    this.replLastIdInfo = new ReplLastIdInfo(other.replLastIdInfo);
  }
  if (other.isSetKeyValue()) {
    this.keyValue = new CommitTxnKeyValue(other.keyValue);
  }
  this.exclWriteEnabled = other.exclWriteEnabled;
  if (other.isSetTxn_type()) {
    this.txn_type = other.txn_type;
  }
}

public CommitTxnRequest deepCopy() {
  return new CommitTxnRequest(this);
}

@Override
public void clear() {
  // Restore the freshly-constructed state, including the exclWriteEnabled default.
  setTxnidIsSet(false);
  this.txnid = 0;
  this.replPolicy = null;
  this.writeEventInfos = null;
  this.replLastIdInfo = null;
  this.keyValue = null;
  this.exclWriteEnabled = true;
  this.txn_type = null;
}
// ---- txnid (required i64; presence tracked in __isset_bitfield) ----

public long getTxnid() {
  return this.txnid;
}

public void setTxnid(long txnid) {
  this.txnid = txnid;
  setTxnidIsSet(true);
}

public void unsetTxnid() {
  __isset_bitfield = org.apache.thrift.EncodingUtils.clearBit(__isset_bitfield, __TXNID_ISSET_ID);
}

/** Returns true if field txnid is set (has been assigned a value) and false otherwise */
public boolean isSetTxnid() {
  return org.apache.thrift.EncodingUtils.testBit(__isset_bitfield, __TXNID_ISSET_ID);
}

public void setTxnidIsSet(boolean value) {
  __isset_bitfield = org.apache.thrift.EncodingUtils.setBit(__isset_bitfield, __TXNID_ISSET_ID, value);
}

// ---- replPolicy (optional string; presence == non-null) ----

@org.apache.thrift.annotation.Nullable
public java.lang.String getReplPolicy() {
  return this.replPolicy;
}

public void setReplPolicy(@org.apache.thrift.annotation.Nullable java.lang.String replPolicy) {
  this.replPolicy = replPolicy;
}

public void unsetReplPolicy() {
  this.replPolicy = null;
}

/** Returns true if field replPolicy is set (has been assigned a value) and false otherwise */
public boolean isSetReplPolicy() {
  return this.replPolicy != null;
}

public void setReplPolicyIsSet(boolean value) {
  if (!value) this.replPolicy = null;
}

// ---- writeEventInfos (optional list<WriteEventInfo>) ----

public int getWriteEventInfosSize() {
  return (this.writeEventInfos == null) ? 0 : this.writeEventInfos.size();
}

@org.apache.thrift.annotation.Nullable
public java.util.Iterator<WriteEventInfo> getWriteEventInfosIterator() {
  return (this.writeEventInfos == null) ? null : this.writeEventInfos.iterator();
}

public void addToWriteEventInfos(WriteEventInfo elem) {
  // Lazily create the backing list on first add.
  if (this.writeEventInfos == null) this.writeEventInfos = new java.util.ArrayList<WriteEventInfo>();
  this.writeEventInfos.add(elem);
}

@org.apache.thrift.annotation.Nullable
public java.util.List<WriteEventInfo> getWriteEventInfos() {
  return this.writeEventInfos;
}

public void setWriteEventInfos(@org.apache.thrift.annotation.Nullable java.util.List<WriteEventInfo> writeEventInfos) {
  this.writeEventInfos = writeEventInfos;
}

public void unsetWriteEventInfos() {
  this.writeEventInfos = null;
}

/** Returns true if field writeEventInfos is set (has been assigned a value) and false otherwise */
public boolean isSetWriteEventInfos() {
  return this.writeEventInfos != null;
}

public void setWriteEventInfosIsSet(boolean value) {
  if (!value) this.writeEventInfos = null;
}

// ---- replLastIdInfo (optional struct) ----

@org.apache.thrift.annotation.Nullable
public ReplLastIdInfo getReplLastIdInfo() {
  return this.replLastIdInfo;
}

public void setReplLastIdInfo(@org.apache.thrift.annotation.Nullable ReplLastIdInfo replLastIdInfo) {
  this.replLastIdInfo = replLastIdInfo;
}

public void unsetReplLastIdInfo() {
  this.replLastIdInfo = null;
}

/** Returns true if field replLastIdInfo is set (has been assigned a value) and false otherwise */
public boolean isSetReplLastIdInfo() {
  return this.replLastIdInfo != null;
}

public void setReplLastIdInfoIsSet(boolean value) {
  if (!value) this.replLastIdInfo = null;
}

// ---- keyValue (optional struct) ----

@org.apache.thrift.annotation.Nullable
public CommitTxnKeyValue getKeyValue() {
  return this.keyValue;
}

public void setKeyValue(@org.apache.thrift.annotation.Nullable CommitTxnKeyValue keyValue) {
  this.keyValue = keyValue;
}

public void unsetKeyValue() {
  this.keyValue = null;
}

/** Returns true if field keyValue is set (has been assigned a value) and false otherwise */
public boolean isSetKeyValue() {
  return this.keyValue != null;
}

public void setKeyValueIsSet(boolean value) {
  if (!value) this.keyValue = null;
}

// ---- exclWriteEnabled (optional bool, default true; presence in __isset_bitfield) ----

public boolean isExclWriteEnabled() {
  return this.exclWriteEnabled;
}

public void setExclWriteEnabled(boolean exclWriteEnabled) {
  this.exclWriteEnabled = exclWriteEnabled;
  setExclWriteEnabledIsSet(true);
}

public void unsetExclWriteEnabled() {
  __isset_bitfield = org.apache.thrift.EncodingUtils.clearBit(__isset_bitfield, __EXCLWRITEENABLED_ISSET_ID);
}

/** Returns true if field exclWriteEnabled is set (has been assigned a value) and false otherwise */
public boolean isSetExclWriteEnabled() {
  return org.apache.thrift.EncodingUtils.testBit(__isset_bitfield, __EXCLWRITEENABLED_ISSET_ID);
}

public void setExclWriteEnabledIsSet(boolean value) {
  __isset_bitfield = org.apache.thrift.EncodingUtils.setBit(__isset_bitfield, __EXCLWRITEENABLED_ISSET_ID, value);
}

// ---- txn_type (optional enum) ----

/**
 * @see TxnType
 */
@org.apache.thrift.annotation.Nullable
public TxnType getTxn_type() {
  return this.txn_type;
}

/**
 * @see TxnType
 */
public void setTxn_type(@org.apache.thrift.annotation.Nullable TxnType txn_type) {
  this.txn_type = txn_type;
}

public void unsetTxn_type() {
  this.txn_type = null;
}

/** Returns true if field txn_type is set (has been assigned a value) and false otherwise */
public boolean isSetTxn_type() {
  return this.txn_type != null;
}

public void setTxn_typeIsSet(boolean value) {
  if (!value) this.txn_type = null;
}
/** Generic setter: null unsets the field, otherwise the value is cast and assigned. */
public void setFieldValue(_Fields field, @org.apache.thrift.annotation.Nullable java.lang.Object value) {
  switch (field) {
    case TXNID:
      if (value == null) unsetTxnid(); else setTxnid((java.lang.Long)value);
      break;
    case REPL_POLICY:
      if (value == null) unsetReplPolicy(); else setReplPolicy((java.lang.String)value);
      break;
    case WRITE_EVENT_INFOS:
      if (value == null) unsetWriteEventInfos(); else setWriteEventInfos((java.util.List<WriteEventInfo>)value);
      break;
    case REPL_LAST_ID_INFO:
      if (value == null) unsetReplLastIdInfo(); else setReplLastIdInfo((ReplLastIdInfo)value);
      break;
    case KEY_VALUE:
      if (value == null) unsetKeyValue(); else setKeyValue((CommitTxnKeyValue)value);
      break;
    case EXCL_WRITE_ENABLED:
      if (value == null) unsetExclWriteEnabled(); else setExclWriteEnabled((java.lang.Boolean)value);
      break;
    case TXN_TYPE:
      if (value == null) unsetTxn_type(); else setTxn_type((TxnType)value);
      break;
  }
}
/** Generic getter; primitives are returned boxed. */
@org.apache.thrift.annotation.Nullable
public java.lang.Object getFieldValue(_Fields field) {
  switch (field) {
    case TXNID: return getTxnid();
    case REPL_POLICY: return getReplPolicy();
    case WRITE_EVENT_INFOS: return getWriteEventInfos();
    case REPL_LAST_ID_INFO: return getReplLastIdInfo();
    case KEY_VALUE: return getKeyValue();
    case EXCL_WRITE_ENABLED: return isExclWriteEnabled();
    case TXN_TYPE: return getTxn_type();
  }
  throw new java.lang.IllegalStateException();
}
/** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
public boolean isSet(_Fields field) {
  if (field == null) {
    throw new java.lang.IllegalArgumentException();
  }
  switch (field) {
    case TXNID: return isSetTxnid();
    case REPL_POLICY: return isSetReplPolicy();
    case WRITE_EVENT_INFOS: return isSetWriteEventInfos();
    case REPL_LAST_ID_INFO: return isSetReplLastIdInfo();
    case KEY_VALUE: return isSetKeyValue();
    case EXCL_WRITE_ENABLED: return isSetExclWriteEnabled();
    case TXN_TYPE: return isSetTxn_type();
  }
  throw new java.lang.IllegalStateException();
}
@Override
public boolean equals(java.lang.Object that) {
  // Delegate to the typed overload; non-CommitTxnRequest (including null) is never equal.
  return (that instanceof CommitTxnRequest) && this.equals((CommitTxnRequest) that);
}
/**
 * Typed equality: the required txnid is compared directly; each optional field must agree
 * on presence, and — when present on both sides — on value.
 */
public boolean equals(CommitTxnRequest that) {
  if (that == null) {
    return false;
  }
  if (this == that) {
    return true;
  }
  if (this.txnid != that.txnid) {
    return false;
  }
  if (isSetReplPolicy() != that.isSetReplPolicy()
      || (isSetReplPolicy() && !this.replPolicy.equals(that.replPolicy))) {
    return false;
  }
  if (isSetWriteEventInfos() != that.isSetWriteEventInfos()
      || (isSetWriteEventInfos() && !this.writeEventInfos.equals(that.writeEventInfos))) {
    return false;
  }
  if (isSetReplLastIdInfo() != that.isSetReplLastIdInfo()
      || (isSetReplLastIdInfo() && !this.replLastIdInfo.equals(that.replLastIdInfo))) {
    return false;
  }
  if (isSetKeyValue() != that.isSetKeyValue()
      || (isSetKeyValue() && !this.keyValue.equals(that.keyValue))) {
    return false;
  }
  if (isSetExclWriteEnabled() != that.isSetExclWriteEnabled()
      || (isSetExclWriteEnabled() && this.exclWriteEnabled != that.exclWriteEnabled)) {
    return false;
  }
  if (isSetTxn_type() != that.isSetTxn_type()
      || (isSetTxn_type() && !this.txn_type.equals(that.txn_type))) {
    return false;
  }
  return true;
}
@Override
public int hashCode() {
  // Generated-style hash: multiply by 8191, mixing 131071/524287 as presence markers.
  // The arithmetic must stay exactly as generated so hashes match equals().
  int h = 1;
  h = h * 8191 + org.apache.thrift.TBaseHelper.hashCode(txnid);
  h = h * 8191 + ((isSetReplPolicy()) ? 131071 : 524287);
  if (isSetReplPolicy()) {
    h = h * 8191 + replPolicy.hashCode();
  }
  h = h * 8191 + ((isSetWriteEventInfos()) ? 131071 : 524287);
  if (isSetWriteEventInfos()) {
    h = h * 8191 + writeEventInfos.hashCode();
  }
  h = h * 8191 + ((isSetReplLastIdInfo()) ? 131071 : 524287);
  if (isSetReplLastIdInfo()) {
    h = h * 8191 + replLastIdInfo.hashCode();
  }
  h = h * 8191 + ((isSetKeyValue()) ? 131071 : 524287);
  if (isSetKeyValue()) {
    h = h * 8191 + keyValue.hashCode();
  }
  h = h * 8191 + ((isSetExclWriteEnabled()) ? 131071 : 524287);
  if (isSetExclWriteEnabled()) {
    h = h * 8191 + ((exclWriteEnabled) ? 131071 : 524287);
  }
  h = h * 8191 + ((isSetTxn_type()) ? 131071 : 524287);
  if (isSetTxn_type()) {
    h = h * 8191 + txn_type.getValue();
  }
  return h;
}
@Override
public int compareTo(CommitTxnRequest other) {
  if (!getClass().equals(other.getClass())) {
    return getClass().getName().compareTo(other.getClass().getName());
  }
  // Fields are compared in declaration order; for each one, presence sorts before value.
  int cmp = java.lang.Boolean.compare(isSetTxnid(), other.isSetTxnid());
  if (cmp != 0) return cmp;
  if (isSetTxnid()) {
    cmp = org.apache.thrift.TBaseHelper.compareTo(this.txnid, other.txnid);
    if (cmp != 0) return cmp;
  }
  cmp = java.lang.Boolean.compare(isSetReplPolicy(), other.isSetReplPolicy());
  if (cmp != 0) return cmp;
  if (isSetReplPolicy()) {
    cmp = org.apache.thrift.TBaseHelper.compareTo(this.replPolicy, other.replPolicy);
    if (cmp != 0) return cmp;
  }
  cmp = java.lang.Boolean.compare(isSetWriteEventInfos(), other.isSetWriteEventInfos());
  if (cmp != 0) return cmp;
  if (isSetWriteEventInfos()) {
    cmp = org.apache.thrift.TBaseHelper.compareTo(this.writeEventInfos, other.writeEventInfos);
    if (cmp != 0) return cmp;
  }
  cmp = java.lang.Boolean.compare(isSetReplLastIdInfo(), other.isSetReplLastIdInfo());
  if (cmp != 0) return cmp;
  if (isSetReplLastIdInfo()) {
    cmp = org.apache.thrift.TBaseHelper.compareTo(this.replLastIdInfo, other.replLastIdInfo);
    if (cmp != 0) return cmp;
  }
  cmp = java.lang.Boolean.compare(isSetKeyValue(), other.isSetKeyValue());
  if (cmp != 0) return cmp;
  if (isSetKeyValue()) {
    cmp = org.apache.thrift.TBaseHelper.compareTo(this.keyValue, other.keyValue);
    if (cmp != 0) return cmp;
  }
  cmp = java.lang.Boolean.compare(isSetExclWriteEnabled(), other.isSetExclWriteEnabled());
  if (cmp != 0) return cmp;
  if (isSetExclWriteEnabled()) {
    cmp = org.apache.thrift.TBaseHelper.compareTo(this.exclWriteEnabled, other.exclWriteEnabled);
    if (cmp != 0) return cmp;
  }
  cmp = java.lang.Boolean.compare(isSetTxn_type(), other.isSetTxn_type());
  if (cmp != 0) return cmp;
  if (isSetTxn_type()) {
    cmp = org.apache.thrift.TBaseHelper.compareTo(this.txn_type, other.txn_type);
    if (cmp != 0) return cmp;
  }
  return 0;
}
@org.apache.thrift.annotation.Nullable
public _Fields fieldForId(int fieldId) {
  return _Fields.findByThriftId(fieldId);
}

// Serialization entry points. scheme(...) (inherited helper, not visible here) presumably
// selects the standard vs tuple scheme from the protocol type — see STANDARD_SCHEME_FACTORY
// and TUPLE_SCHEME_FACTORY above.
public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
  scheme(iprot).read(iprot, this);
}

public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
  scheme(oprot).write(oprot, this);
}
@Override
public java.lang.String toString() {
  // txnid is required and always printed first, so every later (set) field is
  // prefixed with ", ". Optional fields only appear when set, i.e. non-null,
  // so no explicit "null" rendering is needed.
  java.lang.StringBuilder sb = new java.lang.StringBuilder("CommitTxnRequest(");
  sb.append("txnid:").append(this.txnid);
  if (isSetReplPolicy()) {
    sb.append(", replPolicy:").append(this.replPolicy);
  }
  if (isSetWriteEventInfos()) {
    sb.append(", writeEventInfos:").append(this.writeEventInfos);
  }
  if (isSetReplLastIdInfo()) {
    sb.append(", replLastIdInfo:").append(this.replLastIdInfo);
  }
  if (isSetKeyValue()) {
    sb.append(", keyValue:").append(this.keyValue);
  }
  if (isSetExclWriteEnabled()) {
    sb.append(", exclWriteEnabled:").append(this.exclWriteEnabled);
  }
  if (isSetTxn_type()) {
    sb.append(", txn_type:").append(this.txn_type);
  }
  sb.append(")");
  return sb.toString();
}
// Throws if the required txnid is missing; recursively validates nested structs.
public void validate() throws org.apache.thrift.TException {
  // check for required fields
  if (!isSetTxnid()) {
    throw new org.apache.thrift.protocol.TProtocolException("Required field 'txnid' is unset! Struct:" + toString());
  }
  // check for sub-struct validity
  if (replLastIdInfo != null) {
    replLastIdInfo.validate();
  }
  if (keyValue != null) {
    keyValue.validate();
  }
}
// Java serialization hooks: delegate to the Thrift compact protocol over the object stream.
private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
  try {
    write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
  } catch (org.apache.thrift.TException te) {
    throw new java.io.IOException(te);
  }
}

private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, java.lang.ClassNotFoundException {
  try {
    // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
    __isset_bitfield = 0;
    read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
  } catch (org.apache.thrift.TException te) {
    throw new java.io.IOException(te);
  }
}
/** Factory handing out the standard (field-by-field) serialization scheme. */
private static class CommitTxnRequestStandardSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
  public CommitTxnRequestStandardScheme getScheme() {
    return new CommitTxnRequestStandardScheme();
  }
}
private static class CommitTxnRequestStandardScheme extends org.apache.thrift.scheme.StandardScheme<CommitTxnRequest> {
public void read(org.apache.thrift.protocol.TProtocol iprot, CommitTxnRequest struct) throws org.apache.thrift.TException {
org.apache.thrift.protocol.TField schemeField;
iprot.readStructBegin();
while (true)
{
schemeField = iprot.readFieldBegin();
if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
break;
}
switch (schemeField.id) {
case 1: // TXNID
if (schemeField.type == org.apache.thrift.protocol.TType.I64) {
struct.txnid = iprot.readI64();
struct.setTxnidIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 2: // REPL_POLICY
if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
struct.replPolicy = iprot.readString();
struct.setReplPolicyIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 3: // WRITE_EVENT_INFOS
if (schemeField.type == org.apache.thrift.protocol.TType.LIST) {
{
org.apache.thrift.protocol.TList _list828 = iprot.readListBegin();
struct.writeEventInfos = new java.util.ArrayList<WriteEventInfo>(_list828.size);
@org.apache.thrift.annotation.Nullable WriteEventInfo _elem829;
for (int _i830 = 0; _i830 < _list828.size; ++_i830)
{
_elem829 = new WriteEventInfo();
_elem829.read(iprot);
struct.writeEventInfos.add(_elem829);
}
iprot.readListEnd();
}
struct.setWriteEventInfosIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 4: // REPL_LAST_ID_INFO
if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) {
struct.replLastIdInfo = new ReplLastIdInfo();
struct.replLastIdInfo.read(iprot);
struct.setReplLastIdInfoIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 5: // KEY_VALUE
if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) {
struct.keyValue = new CommitTxnKeyValue();
struct.keyValue.read(iprot);
struct.setKeyValueIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 6: // EXCL_WRITE_ENABLED
if (schemeField.type == org.apache.thrift.protocol.TType.BOOL) {
struct.exclWriteEnabled = iprot.readBool();
struct.setExclWriteEnabledIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 7: // TXN_TYPE
if (schemeField.type == org.apache.thrift.protocol.TType.I32) {
struct.txn_type = org.apache.hadoop.hive.metastore.api.TxnType.findByValue(iprot.readI32());
struct.setTxn_typeIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
default:
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
iprot.readFieldEnd();
}
iprot.readStructEnd();
struct.validate();
}
    /**
     * Serializes {@code struct} to {@code oprot} using the standard, field-tagged
     * Thrift encoding: each present field is framed by writeFieldBegin/writeFieldEnd
     * and the record is terminated with a field-stop marker.
     *
     * <p>Generated code — the field order and set-checks mirror the IDL definition;
     * do not reorder by hand.
     *
     * @param oprot  destination protocol
     * @param struct request to serialize; validated before any bytes are written
     * @throws org.apache.thrift.TException on a protocol-level write failure
     */
    public void write(org.apache.thrift.protocol.TProtocol oprot, CommitTxnRequest struct) throws org.apache.thrift.TException {
      struct.validate();
      oprot.writeStructBegin(STRUCT_DESC);
      // txnid is written unconditionally (no isSet/null guard around it).
      oprot.writeFieldBegin(TXNID_FIELD_DESC);
      oprot.writeI64(struct.txnid);
      oprot.writeFieldEnd();
      // Object-typed optional fields are emitted only when non-null AND flagged as set.
      if (struct.replPolicy != null) {
        if (struct.isSetReplPolicy()) {
          oprot.writeFieldBegin(REPL_POLICY_FIELD_DESC);
          oprot.writeString(struct.replPolicy);
          oprot.writeFieldEnd();
        }
      }
      if (struct.writeEventInfos != null) {
        if (struct.isSetWriteEventInfos()) {
          oprot.writeFieldBegin(WRITE_EVENT_INFOS_FIELD_DESC);
          {
            // List header carries element type + count, then each element in order.
            oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, struct.writeEventInfos.size()));
            for (WriteEventInfo _iter831 : struct.writeEventInfos)
            {
              _iter831.write(oprot);
            }
            oprot.writeListEnd();
          }
          oprot.writeFieldEnd();
        }
      }
      if (struct.replLastIdInfo != null) {
        if (struct.isSetReplLastIdInfo()) {
          oprot.writeFieldBegin(REPL_LAST_ID_INFO_FIELD_DESC);
          struct.replLastIdInfo.write(oprot);
          oprot.writeFieldEnd();
        }
      }
      if (struct.keyValue != null) {
        if (struct.isSetKeyValue()) {
          oprot.writeFieldBegin(KEY_VALUE_FIELD_DESC);
          struct.keyValue.write(oprot);
          oprot.writeFieldEnd();
        }
      }
      // Primitive optional: only the isSet flag can gate it (no null check possible).
      if (struct.isSetExclWriteEnabled()) {
        oprot.writeFieldBegin(EXCL_WRITE_ENABLED_FIELD_DESC);
        oprot.writeBool(struct.exclWriteEnabled);
        oprot.writeFieldEnd();
      }
      if (struct.txn_type != null) {
        if (struct.isSetTxn_type()) {
          oprot.writeFieldBegin(TXN_TYPE_FIELD_DESC);
          // Enum is encoded as its i32 wire value.
          oprot.writeI32(struct.txn_type.getValue());
          oprot.writeFieldEnd();
        }
      }
      oprot.writeFieldStop();
      oprot.writeStructEnd();
    }
}
private static class CommitTxnRequestTupleSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
public CommitTxnRequestTupleScheme getScheme() {
return new CommitTxnRequestTupleScheme();
}
}
  /**
   * Compact tuple-protocol codec for {@code CommitTxnRequest}: the required
   * txnid is written first, then a bitset recording which optional fields
   * follow, then each set optional field in field-id order with no per-field
   * framing. Writer and reader must therefore agree exactly on the field
   * order and on the bitset width (6 optional fields).
   *
   * <p>Generated code — do not edit by hand.
   */
  private static class CommitTxnRequestTupleScheme extends org.apache.thrift.scheme.TupleScheme<CommitTxnRequest> {
    @Override
    public void write(org.apache.thrift.protocol.TProtocol prot, CommitTxnRequest struct) throws org.apache.thrift.TException {
      org.apache.thrift.protocol.TTupleProtocol oprot = (org.apache.thrift.protocol.TTupleProtocol) prot;
      // Required field: always written, not tracked in the bitset.
      oprot.writeI64(struct.txnid);
      // One bit per optional field, in field-id order (bits 0..5).
      java.util.BitSet optionals = new java.util.BitSet();
      if (struct.isSetReplPolicy()) {
        optionals.set(0);
      }
      if (struct.isSetWriteEventInfos()) {
        optionals.set(1);
      }
      if (struct.isSetReplLastIdInfo()) {
        optionals.set(2);
      }
      if (struct.isSetKeyValue()) {
        optionals.set(3);
      }
      if (struct.isSetExclWriteEnabled()) {
        optionals.set(4);
      }
      if (struct.isSetTxn_type()) {
        optionals.set(5);
      }
      oprot.writeBitSet(optionals, 6);
      // Payloads follow in the same order as the bits above.
      if (struct.isSetReplPolicy()) {
        oprot.writeString(struct.replPolicy);
      }
      if (struct.isSetWriteEventInfos()) {
        {
          // Tuple lists carry only a size; the element type is implied by the schema.
          oprot.writeI32(struct.writeEventInfos.size());
          for (WriteEventInfo _iter832 : struct.writeEventInfos)
          {
            _iter832.write(oprot);
          }
        }
      }
      if (struct.isSetReplLastIdInfo()) {
        struct.replLastIdInfo.write(oprot);
      }
      if (struct.isSetKeyValue()) {
        struct.keyValue.write(oprot);
      }
      if (struct.isSetExclWriteEnabled()) {
        oprot.writeBool(struct.exclWriteEnabled);
      }
      if (struct.isSetTxn_type()) {
        // Enum encoded as its i32 wire value.
        oprot.writeI32(struct.txn_type.getValue());
      }
    }
    /**
     * Mirror of {@link #write(org.apache.thrift.protocol.TProtocol, CommitTxnRequest)}:
     * reads txnid, the 6-bit optionals bitset, then each optional field whose bit is
     * set, marking the corresponding isSet flag as it goes.
     */
    @Override
    public void read(org.apache.thrift.protocol.TProtocol prot, CommitTxnRequest struct) throws org.apache.thrift.TException {
      org.apache.thrift.protocol.TTupleProtocol iprot = (org.apache.thrift.protocol.TTupleProtocol) prot;
      struct.txnid = iprot.readI64();
      struct.setTxnidIsSet(true);
      java.util.BitSet incoming = iprot.readBitSet(6);
      if (incoming.get(0)) {
        struct.replPolicy = iprot.readString();
        struct.setReplPolicyIsSet(true);
      }
      if (incoming.get(1)) {
        {
          // Tuple-protocol list header supplies only the size; element type is fixed to STRUCT.
          org.apache.thrift.protocol.TList _list833 = iprot.readListBegin(org.apache.thrift.protocol.TType.STRUCT);
          struct.writeEventInfos = new java.util.ArrayList<WriteEventInfo>(_list833.size);
          @org.apache.thrift.annotation.Nullable WriteEventInfo _elem834;
          for (int _i835 = 0; _i835 < _list833.size; ++_i835)
          {
            _elem834 = new WriteEventInfo();
            _elem834.read(iprot);
            struct.writeEventInfos.add(_elem834);
          }
        }
        struct.setWriteEventInfosIsSet(true);
      }
      if (incoming.get(2)) {
        struct.replLastIdInfo = new ReplLastIdInfo();
        struct.replLastIdInfo.read(iprot);
        struct.setReplLastIdInfoIsSet(true);
      }
      if (incoming.get(3)) {
        struct.keyValue = new CommitTxnKeyValue();
        struct.keyValue.read(iprot);
        struct.setKeyValueIsSet(true);
      }
      if (incoming.get(4)) {
        struct.exclWriteEnabled = iprot.readBool();
        struct.setExclWriteEnabledIsSet(true);
      }
      if (incoming.get(5)) {
        // NOTE(review): findByValue presumably yields null for unrecognized wire values — confirm against TxnType.
        struct.txn_type = org.apache.hadoop.hive.metastore.api.TxnType.findByValue(iprot.readI32());
        struct.setTxn_typeIsSet(true);
      }
    }
  }
private static <S extends org.apache.thrift.scheme.IScheme> S scheme(org.apache.thrift.protocol.TProtocol proto) {
return (org.apache.thrift.scheme.StandardScheme.class.equals(proto.getScheme()) ? STANDARD_SCHEME_FACTORY : TUPLE_SCHEME_FACTORY).getScheme();
}
}
|
apache/rya | 37,880 | extras/indexing/src/main/java/org/apache/rya/indexing/accumulo/freetext/query/Node.java | /* Generated By:JJTree: Do not edit this line. Node.java Version 4.3 */
/* JavaCCOptions:MULTI=true,NODE_USES_PARSER=false,VISITOR=false,TRACK_TOKENS=false,NODE_PREFIX=AST,NODE_EXTENDS=,NODE_FACTORY=,SUPPORT_CLASS_VISIBILITY_PUBLIC=true */
package org.apache.rya.indexing.accumulo.freetext.query;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/**
 * All AST nodes must implement this interface. It provides basic
 * machinery for constructing the parent and child relationships
 * between nodes. (Contract generated by JavaCC/JJTree.)
 */
public
interface Node {

  /**
   * Called after the node has been made the current
   * node. It indicates that child nodes can now be added to it.
   */
  public void jjtOpen();

  /**
   * Called after all the child nodes have been
   * added.
   */
  public void jjtClose();

  /**
   * This pair of methods is used to inform the node of its
   * parent.
   */
  public void jjtSetParent(Node n);
  public Node jjtGetParent();

  /**
   * Tells the node to add its argument to the node's
   * list of children at position {@code i}.
   */
  public void jjtAddChild(Node n, int i);

  /**
   * Returns a child node. The children are numbered
   * from zero, left to right.
   */
  public Node jjtGetChild(int i);

  /** Return the number of children the node has. */
  public int jjtGetNumChildren();
}
/* JavaCC - OriginalChecksum=e66efa9c359bf70af0cdb4f33bea0630 (do not edit this line) */
/*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.iamcredentials.v1;
/**
* Service definition for IAMCredentials (v1).
*
* <p>
* Creates short-lived credentials for impersonating IAM service accounts. To enable this API, you must enable the IAM API (iam.googleapis.com).
* </p>
*
* <p>
* For more information about this service, see the
* <a href="https://cloud.google.com/iam/docs/creating-short-lived-service-account-credentials" target="_blank">API Documentation</a>
* </p>
*
* <p>
* This service uses {@link IAMCredentialsRequestInitializer} to initialize global parameters via its
* {@link Builder}.
* </p>
*
* @since 1.3
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public class IAMCredentials extends com.google.api.client.googleapis.services.json.AbstractGoogleJsonClient {
  // Note: Leave this static initializer at the top of the file.
  static {
    // Fail fast at class-load time if the runtime google-api-client is older than the
    // minimum this generated library was built against (>= 1.31.1).
    com.google.api.client.util.Preconditions.checkState(
        com.google.api.client.googleapis.GoogleUtils.MAJOR_VERSION == 1 &&
        (com.google.api.client.googleapis.GoogleUtils.MINOR_VERSION >= 32 ||
        (com.google.api.client.googleapis.GoogleUtils.MINOR_VERSION == 31 &&
        com.google.api.client.googleapis.GoogleUtils.BUGFIX_VERSION >= 1)),
        "You are currently running with version %s of google-api-client. " +
        "You need at least version 1.31.1 of google-api-client to run version " +
        "1.32.1 of the IAM Service Account Credentials API library.", com.google.api.client.googleapis.GoogleUtils.VERSION);
  }

  /**
   * The default encoded root URL of the service. This is determined when the library is generated
   * and normally should not be changed.
   *
   * @since 1.7
   */
  public static final String DEFAULT_ROOT_URL = "https://iamcredentials.googleapis.com/";

  /**
   * The default encoded mTLS root URL of the service. This is determined when the library is generated
   * and normally should not be changed.
   *
   * @since 1.31
   */
  public static final String DEFAULT_MTLS_ROOT_URL = "https://iamcredentials.mtls.googleapis.com/";

  /**
   * The default encoded service path of the service (empty for this API). This is determined when
   * the library is generated and normally should not be changed.
   *
   * @since 1.7
   */
  public static final String DEFAULT_SERVICE_PATH = "";

  /**
   * The default encoded batch path of the service. This is determined when the library is
   * generated and normally should not be changed.
   *
   * @since 1.23
   */
  public static final String DEFAULT_BATCH_PATH = "batch";

  /**
   * The default encoded base URL of the service (root URL + service path). This is determined when
   * the library is generated and normally should not be changed.
   */
  public static final String DEFAULT_BASE_URL = DEFAULT_ROOT_URL + DEFAULT_SERVICE_PATH;
  /**
   * Constructor.
   *
   * <p>
   * Use {@link Builder} if you need to specify any of the optional parameters.
   * </p>
   *
   * @param transport HTTP transport, which should normally be:
   *        <ul>
   *        <li>Google App Engine:
   *        {@code com.google.api.client.extensions.appengine.http.UrlFetchTransport}</li>
   *        <li>Android: {@code newCompatibleTransport} from
   *        {@code com.google.api.client.extensions.android.http.AndroidHttp}</li>
   *        <li>Java: {@link com.google.api.client.googleapis.javanet.GoogleNetHttpTransport#newTrustedTransport()}
   *        </li>
   *        </ul>
   * @param jsonFactory JSON factory, which may be:
   *        <ul>
   *        <li>Jackson: {@code com.google.api.client.json.jackson2.JacksonFactory}</li>
   *        <li>Google GSON: {@code com.google.api.client.json.gson.GsonFactory}</li>
   *        <li>Android Honeycomb or higher:
   *        {@code com.google.api.client.extensions.android.json.AndroidJsonFactory}</li>
   *        </ul>
   * @param httpRequestInitializer HTTP request initializer or {@code null} for none
   * @since 1.7
   */
  public IAMCredentials(com.google.api.client.http.HttpTransport transport, com.google.api.client.json.JsonFactory jsonFactory,
      com.google.api.client.http.HttpRequestInitializer httpRequestInitializer) {
    // Delegate to the Builder-based constructor so all defaults are applied in one place.
    this(new Builder(transport, jsonFactory, httpRequestInitializer));
  }

  /**
   * Package-private constructor used by {@link Builder#build()}.
   *
   * @param builder builder
   */
  IAMCredentials(Builder builder) {
    super(builder);
  }

  /** Hook applied to every request created by this client; currently just defers to the superclass. */
  @Override
  protected void initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest<?> httpClientRequest) throws java.io.IOException {
    super.initialize(httpClientRequest);
  }
  /**
   * An accessor for creating requests from the Projects collection.
   *
   * <p>The typical use is:</p>
   * <pre>
   *   {@code IAMCredentials iamcredentials = new IAMCredentials(...);}
   *   {@code IAMCredentials.Projects.List request = iamcredentials.projects().list(parameters ...)}
   * </pre>
   *
   * @return the resource collection (a new lightweight accessor per call; it holds no state)
   */
  public Projects projects() {
    return new Projects();
  }
/**
* The "projects" collection of methods.
*/
public class Projects {
    /**
     * An accessor for creating requests from the ServiceAccounts collection.
     *
     * <p>The typical use is:</p>
     * <pre>
     *   {@code IAMCredentials iamcredentials = new IAMCredentials(...);}
     *   {@code IAMCredentials.ServiceAccounts.List request = iamcredentials.serviceAccounts().list(parameters ...)}
     * </pre>
     *
     * @return the resource collection (a new lightweight accessor per call; it holds no state)
     */
    public ServiceAccounts serviceAccounts() {
      return new ServiceAccounts();
    }
/**
* The "serviceAccounts" collection of methods.
*/
public class ServiceAccounts {
/**
* Generates an OAuth 2.0 access token for a service account.
*
* Create a request for the method "serviceAccounts.generateAccessToken".
*
* This request holds the parameters needed by the iamcredentials server. After setting any
* optional parameters, call the {@link GenerateAccessToken#execute()} method to invoke the remote
* operation.
*
* @param name Required. The resource name of the service account for which the credentials are requested, in the
* following format: `projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}`. The `-`
* wildcard character is required; replacing it with a project ID is invalid.
* @param content the {@link com.google.api.services.iamcredentials.v1.model.GenerateAccessTokenRequest}
* @return the request
*/
public GenerateAccessToken generateAccessToken(java.lang.String name, com.google.api.services.iamcredentials.v1.model.GenerateAccessTokenRequest content) throws java.io.IOException {
GenerateAccessToken result = new GenerateAccessToken(name, content);
initialize(result);
return result;
}
      /**
       * Request object for {@code serviceAccounts.generateAccessToken}. Generated fluent-setter
       * boilerplate; each override only narrows the return type for chaining.
       */
      public class GenerateAccessToken extends IAMCredentialsRequest<com.google.api.services.iamcredentials.v1.model.GenerateAccessTokenResponse> {

        private static final String REST_PATH = "v1/{+name}:generateAccessToken";

        // Validates the `name` path parameter unless pattern checks are suppressed.
        private final java.util.regex.Pattern NAME_PATTERN =
            java.util.regex.Pattern.compile("^projects/[^/]+/serviceAccounts/[^/]+$");

        /**
         * Generates an OAuth 2.0 access token for a service account.
         *
         * Create a request for the method "serviceAccounts.generateAccessToken".
         *
         * This request holds the parameters needed by the iamcredentials server. After setting any
         * optional parameters, call the {@link GenerateAccessToken#execute()} method to invoke the remote
         * operation. <p> {@link GenerateAccessToken#initialize(com.google.api.client.googleapis.services.
         * AbstractGoogleClientRequest)} must be called to initialize this instance immediately after
         * invoking the constructor. </p>
         *
         * @param name Required. The resource name of the service account for which the credentials are requested, in the
         *        following format: `projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}`. The `-`
         *        wildcard character is required; replacing it with a project ID is invalid.
         * @param content the {@link com.google.api.services.iamcredentials.v1.model.GenerateAccessTokenRequest}
         * @since 1.13
         */
        protected GenerateAccessToken(java.lang.String name, com.google.api.services.iamcredentials.v1.model.GenerateAccessTokenRequest content) {
          super(IAMCredentials.this, "POST", REST_PATH, content, com.google.api.services.iamcredentials.v1.model.GenerateAccessTokenResponse.class);
          this.name = com.google.api.client.util.Preconditions.checkNotNull(name, "Required parameter name must be specified.");
          if (!getSuppressPatternChecks()) {
            com.google.api.client.util.Preconditions.checkArgument(NAME_PATTERN.matcher(name).matches(),
                "Parameter name must conform to the pattern " +
                "^projects/[^/]+/serviceAccounts/[^/]+$");
          }
        }

        @Override
        public GenerateAccessToken set$Xgafv(java.lang.String $Xgafv) {
          return (GenerateAccessToken) super.set$Xgafv($Xgafv);
        }

        @Override
        public GenerateAccessToken setAccessToken(java.lang.String accessToken) {
          return (GenerateAccessToken) super.setAccessToken(accessToken);
        }

        @Override
        public GenerateAccessToken setAlt(java.lang.String alt) {
          return (GenerateAccessToken) super.setAlt(alt);
        }

        @Override
        public GenerateAccessToken setCallback(java.lang.String callback) {
          return (GenerateAccessToken) super.setCallback(callback);
        }

        @Override
        public GenerateAccessToken setFields(java.lang.String fields) {
          return (GenerateAccessToken) super.setFields(fields);
        }

        @Override
        public GenerateAccessToken setKey(java.lang.String key) {
          return (GenerateAccessToken) super.setKey(key);
        }

        @Override
        public GenerateAccessToken setOauthToken(java.lang.String oauthToken) {
          return (GenerateAccessToken) super.setOauthToken(oauthToken);
        }

        @Override
        public GenerateAccessToken setPrettyPrint(java.lang.Boolean prettyPrint) {
          return (GenerateAccessToken) super.setPrettyPrint(prettyPrint);
        }

        @Override
        public GenerateAccessToken setQuotaUser(java.lang.String quotaUser) {
          return (GenerateAccessToken) super.setQuotaUser(quotaUser);
        }

        @Override
        public GenerateAccessToken setUploadType(java.lang.String uploadType) {
          return (GenerateAccessToken) super.setUploadType(uploadType);
        }

        @Override
        public GenerateAccessToken setUploadProtocol(java.lang.String uploadProtocol) {
          return (GenerateAccessToken) super.setUploadProtocol(uploadProtocol);
        }

        /**
         * Required. The resource name of the service account for which the credentials are
         * requested, in the following format:
         * `projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}`. The `-` wildcard character is
         * required; replacing it with a project ID is invalid.
         */
        @com.google.api.client.util.Key
        private java.lang.String name;

        /** Required. The resource name of the service account for which the credentials are requested, in the
         following format: `projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}`. The `-` wildcard
         character is required; replacing it with a project ID is invalid.
         */
        public java.lang.String getName() {
          return name;
        }

        /**
         * Required. The resource name of the service account for which the credentials are
         * requested, in the following format:
         * `projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}`. The `-` wildcard character is
         * required; replacing it with a project ID is invalid.
         */
        public GenerateAccessToken setName(java.lang.String name) {
          if (!getSuppressPatternChecks()) {
            com.google.api.client.util.Preconditions.checkArgument(NAME_PATTERN.matcher(name).matches(),
                "Parameter name must conform to the pattern " +
                "^projects/[^/]+/serviceAccounts/[^/]+$");
          }
          this.name = name;
          return this;
        }

        @Override
        public GenerateAccessToken set(String parameterName, Object value) {
          return (GenerateAccessToken) super.set(parameterName, value);
        }
      }
/**
* Generates an OpenID Connect ID token for a service account.
*
* Create a request for the method "serviceAccounts.generateIdToken".
*
* This request holds the parameters needed by the iamcredentials server. After setting any
* optional parameters, call the {@link GenerateIdToken#execute()} method to invoke the remote
* operation.
*
* @param name Required. The resource name of the service account for which the credentials are requested, in the
* following format: `projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}`. The `-`
* wildcard character is required; replacing it with a project ID is invalid.
* @param content the {@link com.google.api.services.iamcredentials.v1.model.GenerateIdTokenRequest}
* @return the request
*/
public GenerateIdToken generateIdToken(java.lang.String name, com.google.api.services.iamcredentials.v1.model.GenerateIdTokenRequest content) throws java.io.IOException {
GenerateIdToken result = new GenerateIdToken(name, content);
initialize(result);
return result;
}
      /**
       * Request object for {@code serviceAccounts.generateIdToken}. Generated fluent-setter
       * boilerplate; each override only narrows the return type for chaining.
       */
      public class GenerateIdToken extends IAMCredentialsRequest<com.google.api.services.iamcredentials.v1.model.GenerateIdTokenResponse> {

        private static final String REST_PATH = "v1/{+name}:generateIdToken";

        // Validates the `name` path parameter unless pattern checks are suppressed.
        private final java.util.regex.Pattern NAME_PATTERN =
            java.util.regex.Pattern.compile("^projects/[^/]+/serviceAccounts/[^/]+$");

        /**
         * Generates an OpenID Connect ID token for a service account.
         *
         * Create a request for the method "serviceAccounts.generateIdToken".
         *
         * This request holds the parameters needed by the iamcredentials server. After setting any
         * optional parameters, call the {@link GenerateIdToken#execute()} method to invoke the remote
         * operation. <p> {@link GenerateIdToken#initialize(com.google.api.client.googleapis.services.Abst
         * ractGoogleClientRequest)} must be called to initialize this instance immediately after invoking
         * the constructor. </p>
         *
         * @param name Required. The resource name of the service account for which the credentials are requested, in the
         *        following format: `projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}`. The `-`
         *        wildcard character is required; replacing it with a project ID is invalid.
         * @param content the {@link com.google.api.services.iamcredentials.v1.model.GenerateIdTokenRequest}
         * @since 1.13
         */
        protected GenerateIdToken(java.lang.String name, com.google.api.services.iamcredentials.v1.model.GenerateIdTokenRequest content) {
          super(IAMCredentials.this, "POST", REST_PATH, content, com.google.api.services.iamcredentials.v1.model.GenerateIdTokenResponse.class);
          this.name = com.google.api.client.util.Preconditions.checkNotNull(name, "Required parameter name must be specified.");
          if (!getSuppressPatternChecks()) {
            com.google.api.client.util.Preconditions.checkArgument(NAME_PATTERN.matcher(name).matches(),
                "Parameter name must conform to the pattern " +
                "^projects/[^/]+/serviceAccounts/[^/]+$");
          }
        }

        @Override
        public GenerateIdToken set$Xgafv(java.lang.String $Xgafv) {
          return (GenerateIdToken) super.set$Xgafv($Xgafv);
        }

        @Override
        public GenerateIdToken setAccessToken(java.lang.String accessToken) {
          return (GenerateIdToken) super.setAccessToken(accessToken);
        }

        @Override
        public GenerateIdToken setAlt(java.lang.String alt) {
          return (GenerateIdToken) super.setAlt(alt);
        }

        @Override
        public GenerateIdToken setCallback(java.lang.String callback) {
          return (GenerateIdToken) super.setCallback(callback);
        }

        @Override
        public GenerateIdToken setFields(java.lang.String fields) {
          return (GenerateIdToken) super.setFields(fields);
        }

        @Override
        public GenerateIdToken setKey(java.lang.String key) {
          return (GenerateIdToken) super.setKey(key);
        }

        @Override
        public GenerateIdToken setOauthToken(java.lang.String oauthToken) {
          return (GenerateIdToken) super.setOauthToken(oauthToken);
        }

        @Override
        public GenerateIdToken setPrettyPrint(java.lang.Boolean prettyPrint) {
          return (GenerateIdToken) super.setPrettyPrint(prettyPrint);
        }

        @Override
        public GenerateIdToken setQuotaUser(java.lang.String quotaUser) {
          return (GenerateIdToken) super.setQuotaUser(quotaUser);
        }

        @Override
        public GenerateIdToken setUploadType(java.lang.String uploadType) {
          return (GenerateIdToken) super.setUploadType(uploadType);
        }

        @Override
        public GenerateIdToken setUploadProtocol(java.lang.String uploadProtocol) {
          return (GenerateIdToken) super.setUploadProtocol(uploadProtocol);
        }

        /**
         * Required. The resource name of the service account for which the credentials are
         * requested, in the following format:
         * `projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}`. The `-` wildcard character is
         * required; replacing it with a project ID is invalid.
         */
        @com.google.api.client.util.Key
        private java.lang.String name;

        /** Required. The resource name of the service account for which the credentials are requested, in the
         following format: `projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}`. The `-` wildcard
         character is required; replacing it with a project ID is invalid.
         */
        public java.lang.String getName() {
          return name;
        }

        /**
         * Required. The resource name of the service account for which the credentials are
         * requested, in the following format:
         * `projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}`. The `-` wildcard character is
         * required; replacing it with a project ID is invalid.
         */
        public GenerateIdToken setName(java.lang.String name) {
          if (!getSuppressPatternChecks()) {
            com.google.api.client.util.Preconditions.checkArgument(NAME_PATTERN.matcher(name).matches(),
                "Parameter name must conform to the pattern " +
                "^projects/[^/]+/serviceAccounts/[^/]+$");
          }
          this.name = name;
          return this;
        }

        @Override
        public GenerateIdToken set(String parameterName, Object value) {
          return (GenerateIdToken) super.set(parameterName, value);
        }
      }
/**
* Signs a blob using a service account's system-managed private key.
*
* Create a request for the method "serviceAccounts.signBlob".
*
* This request holds the parameters needed by the iamcredentials server. After setting any
* optional parameters, call the {@link SignBlob#execute()} method to invoke the remote operation.
*
* @param name Required. The resource name of the service account for which the credentials are requested, in the
* following format: `projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}`. The `-`
* wildcard character is required; replacing it with a project ID is invalid.
* @param content the {@link com.google.api.services.iamcredentials.v1.model.SignBlobRequest}
* @return the request
*/
public SignBlob signBlob(java.lang.String name, com.google.api.services.iamcredentials.v1.model.SignBlobRequest content) throws java.io.IOException {
SignBlob result = new SignBlob(name, content);
initialize(result);
return result;
}
      /**
       * Request object for {@code serviceAccounts.signBlob}. Generated fluent-setter
       * boilerplate; each override only narrows the return type for chaining.
       */
      public class SignBlob extends IAMCredentialsRequest<com.google.api.services.iamcredentials.v1.model.SignBlobResponse> {

        private static final String REST_PATH = "v1/{+name}:signBlob";

        // Validates the `name` path parameter unless pattern checks are suppressed.
        private final java.util.regex.Pattern NAME_PATTERN =
            java.util.regex.Pattern.compile("^projects/[^/]+/serviceAccounts/[^/]+$");

        /**
         * Signs a blob using a service account's system-managed private key.
         *
         * Create a request for the method "serviceAccounts.signBlob".
         *
         * This request holds the parameters needed by the iamcredentials server. After setting any
         * optional parameters, call the {@link SignBlob#execute()} method to invoke the remote operation.
         * <p> {@link
         * SignBlob#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)}
         * must be called to initialize this instance immediately after invoking the constructor. </p>
         *
         * @param name Required. The resource name of the service account for which the credentials are requested, in the
         *        following format: `projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}`. The `-`
         *        wildcard character is required; replacing it with a project ID is invalid.
         * @param content the {@link com.google.api.services.iamcredentials.v1.model.SignBlobRequest}
         * @since 1.13
         */
        protected SignBlob(java.lang.String name, com.google.api.services.iamcredentials.v1.model.SignBlobRequest content) {
          super(IAMCredentials.this, "POST", REST_PATH, content, com.google.api.services.iamcredentials.v1.model.SignBlobResponse.class);
          this.name = com.google.api.client.util.Preconditions.checkNotNull(name, "Required parameter name must be specified.");
          if (!getSuppressPatternChecks()) {
            com.google.api.client.util.Preconditions.checkArgument(NAME_PATTERN.matcher(name).matches(),
                "Parameter name must conform to the pattern " +
                "^projects/[^/]+/serviceAccounts/[^/]+$");
          }
        }

        @Override
        public SignBlob set$Xgafv(java.lang.String $Xgafv) {
          return (SignBlob) super.set$Xgafv($Xgafv);
        }

        @Override
        public SignBlob setAccessToken(java.lang.String accessToken) {
          return (SignBlob) super.setAccessToken(accessToken);
        }

        @Override
        public SignBlob setAlt(java.lang.String alt) {
          return (SignBlob) super.setAlt(alt);
        }

        @Override
        public SignBlob setCallback(java.lang.String callback) {
          return (SignBlob) super.setCallback(callback);
        }

        @Override
        public SignBlob setFields(java.lang.String fields) {
          return (SignBlob) super.setFields(fields);
        }

        @Override
        public SignBlob setKey(java.lang.String key) {
          return (SignBlob) super.setKey(key);
        }

        @Override
        public SignBlob setOauthToken(java.lang.String oauthToken) {
          return (SignBlob) super.setOauthToken(oauthToken);
        }

        @Override
        public SignBlob setPrettyPrint(java.lang.Boolean prettyPrint) {
          return (SignBlob) super.setPrettyPrint(prettyPrint);
        }

        @Override
        public SignBlob setQuotaUser(java.lang.String quotaUser) {
          return (SignBlob) super.setQuotaUser(quotaUser);
        }

        @Override
        public SignBlob setUploadType(java.lang.String uploadType) {
          return (SignBlob) super.setUploadType(uploadType);
        }

        @Override
        public SignBlob setUploadProtocol(java.lang.String uploadProtocol) {
          return (SignBlob) super.setUploadProtocol(uploadProtocol);
        }

        /**
         * Required. The resource name of the service account for which the credentials are
         * requested, in the following format:
         * `projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}`. The `-` wildcard character is
         * required; replacing it with a project ID is invalid.
         */
        @com.google.api.client.util.Key
        private java.lang.String name;

        /** Required. The resource name of the service account for which the credentials are requested, in the
         following format: `projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}`. The `-` wildcard
         character is required; replacing it with a project ID is invalid.
         */
        public java.lang.String getName() {
          return name;
        }

        /**
         * Required. The resource name of the service account for which the credentials are
         * requested, in the following format:
         * `projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}`. The `-` wildcard character is
         * required; replacing it with a project ID is invalid.
         */
        public SignBlob setName(java.lang.String name) {
          if (!getSuppressPatternChecks()) {
            com.google.api.client.util.Preconditions.checkArgument(NAME_PATTERN.matcher(name).matches(),
                "Parameter name must conform to the pattern " +
                "^projects/[^/]+/serviceAccounts/[^/]+$");
          }
          this.name = name;
          return this;
        }

        @Override
        public SignBlob set(String parameterName, Object value) {
          return (SignBlob) super.set(parameterName, value);
        }
      }
/**
 * Signs a JWT using a service account's system-managed private key.
 *
 * Create a request for the method "serviceAccounts.signJwt".
 *
 * This request holds the parameters needed by the iamcredentials server. After setting any
 * optional parameters, call the {@link SignJwt#execute()} method to invoke the remote operation.
 *
 * @param name Required. The resource name of the service account for which the credentials are requested, in the
 *        following format: `projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}`. The `-`
 *        wildcard character is required; replacing it with a project ID is invalid.
 * @param content the {@link com.google.api.services.iamcredentials.v1.model.SignJwtRequest}
 * @return the initialized request, ready for {@code execute()}
 */
public SignJwt signJwt(java.lang.String name, com.google.api.services.iamcredentials.v1.model.SignJwtRequest content) throws java.io.IOException {
SignJwt request = new SignJwt(name, content);
initialize(request);
return request;
}
public class SignJwt extends IAMCredentialsRequest<com.google.api.services.iamcredentials.v1.model.SignJwtResponse> {
private static final String REST_PATH = "v1/{+name}:signJwt";
// Validates the service-account resource name supplied as the `name` path parameter.
private final java.util.regex.Pattern NAME_PATTERN =
java.util.regex.Pattern.compile("^projects/[^/]+/serviceAccounts/[^/]+$");
/**
* Signs a JWT using a service account's system-managed private key.
*
* Create a request for the method "serviceAccounts.signJwt".
*
* This request holds the parameters needed by the iamcredentials server. After setting any
* optional parameters, call the {@link SignJwt#execute()} method to invoke the remote operation.
* <p> {@link
* SignJwt#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)} must
* be called to initialize this instance immediately after invoking the constructor. </p>
*
* @param name Required. The resource name of the service account for which the credentials are requested, in the
* following format: `projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}`. The `-`
* wildcard character is required; replacing it with a project ID is invalid.
* @param content the {@link com.google.api.services.iamcredentials.v1.model.SignJwtRequest}
* @since 1.13
*/
protected SignJwt(java.lang.String name, com.google.api.services.iamcredentials.v1.model.SignJwtRequest content) {
super(IAMCredentials.this, "POST", REST_PATH, content, com.google.api.services.iamcredentials.v1.model.SignJwtResponse.class);
this.name = com.google.api.client.util.Preconditions.checkNotNull(name, "Required parameter name must be specified.");
if (!getSuppressPatternChecks()) {
com.google.api.client.util.Preconditions.checkArgument(NAME_PATTERN.matcher(name).matches(),
"Parameter name must conform to the pattern " +
"^projects/[^/]+/serviceAccounts/[^/]+$");
}
}
// Covariant overrides of the inherited standard query-parameter setters; each
// delegates to the superclass and narrows the return type to SignJwt.
@Override
public SignJwt set$Xgafv(java.lang.String $Xgafv) {
return (SignJwt) super.set$Xgafv($Xgafv);
}
@Override
public SignJwt setAccessToken(java.lang.String accessToken) {
return (SignJwt) super.setAccessToken(accessToken);
}
@Override
public SignJwt setAlt(java.lang.String alt) {
return (SignJwt) super.setAlt(alt);
}
@Override
public SignJwt setCallback(java.lang.String callback) {
return (SignJwt) super.setCallback(callback);
}
@Override
public SignJwt setFields(java.lang.String fields) {
return (SignJwt) super.setFields(fields);
}
@Override
public SignJwt setKey(java.lang.String key) {
return (SignJwt) super.setKey(key);
}
@Override
public SignJwt setOauthToken(java.lang.String oauthToken) {
return (SignJwt) super.setOauthToken(oauthToken);
}
@Override
public SignJwt setPrettyPrint(java.lang.Boolean prettyPrint) {
return (SignJwt) super.setPrettyPrint(prettyPrint);
}
@Override
public SignJwt setQuotaUser(java.lang.String quotaUser) {
return (SignJwt) super.setQuotaUser(quotaUser);
}
@Override
public SignJwt setUploadType(java.lang.String uploadType) {
return (SignJwt) super.setUploadType(uploadType);
}
@Override
public SignJwt setUploadProtocol(java.lang.String uploadProtocol) {
return (SignJwt) super.setUploadProtocol(uploadProtocol);
}
/**
* Required. The resource name of the service account for which the credentials are
* requested, in the following format:
* `projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}`. The `-` wildcard character is
* required; replacing it with a project ID is invalid.
*/
@com.google.api.client.util.Key
private java.lang.String name;
/** Required. The resource name of the service account for which the credentials are requested, in the
following format: `projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}`. The `-` wildcard
character is required; replacing it with a project ID is invalid.
*/
public java.lang.String getName() {
return name;
}
/**
* Required. The resource name of the service account for which the credentials are
* requested, in the following format:
* `projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}`. The `-` wildcard character is
* required; replacing it with a project ID is invalid.
*/
public SignJwt setName(java.lang.String name) {
// Validate against NAME_PATTERN unless pattern checks were suppressed on the client builder.
if (!getSuppressPatternChecks()) {
com.google.api.client.util.Preconditions.checkArgument(NAME_PATTERN.matcher(name).matches(),
"Parameter name must conform to the pattern " +
"^projects/[^/]+/serviceAccounts/[^/]+$");
}
this.name = name;
return this;
}
// Generic setter retained so arbitrary parameters can still be supplied by name.
@Override
public SignJwt set(String parameterName, Object value) {
return (SignJwt) super.set(parameterName, value);
}
}
}
}
/**
 * Builder for {@link IAMCredentials}.
 *
 * <p>
 * Implementation is not thread-safe.
 * </p>
 *
 * @since 1.3.0
 */
public static final class Builder extends com.google.api.client.googleapis.services.json.AbstractGoogleJsonClient.Builder {
// Selects the root URL based on the GOOGLE_API_USE_MTLS_ENDPOINT environment variable
// and whether the supplied transport is an mTLS transport.
private static String chooseEndpoint(com.google.api.client.http.HttpTransport transport) {
// If the GOOGLE_API_USE_MTLS_ENDPOINT environment variable value is "always", use mTLS endpoint.
// If the env variable is "auto", use mTLS endpoint if and only if the transport is mTLS.
// Use the regular endpoint for all other cases.
String useMtlsEndpoint = System.getenv("GOOGLE_API_USE_MTLS_ENDPOINT");
useMtlsEndpoint = useMtlsEndpoint == null ? "auto" : useMtlsEndpoint;
if ("always".equals(useMtlsEndpoint) || ("auto".equals(useMtlsEndpoint) && transport != null && transport.isMtls())) {
return DEFAULT_MTLS_ROOT_URL;
}
return DEFAULT_ROOT_URL;
}
/**
* Returns an instance of a new builder.
*
* @param transport HTTP transport, which should normally be:
* <ul>
* <li>Google App Engine:
* {@code com.google.api.client.extensions.appengine.http.UrlFetchTransport}</li>
* <li>Android: {@code newCompatibleTransport} from
* {@code com.google.api.client.extensions.android.http.AndroidHttp}</li>
* <li>Java: {@link com.google.api.client.googleapis.javanet.GoogleNetHttpTransport#newTrustedTransport()}
* </li>
* </ul>
* @param jsonFactory JSON factory, which may be:
* <ul>
* <li>Jackson: {@code com.google.api.client.json.jackson2.JacksonFactory}</li>
* <li>Google GSON: {@code com.google.api.client.json.gson.GsonFactory}</li>
* <li>Android Honeycomb or higher:
* {@code com.google.api.client.extensions.android.json.AndroidJsonFactory}</li>
* </ul>
* @param httpRequestInitializer HTTP request initializer or {@code null} for none
* @since 1.7
*/
public Builder(com.google.api.client.http.HttpTransport transport, com.google.api.client.json.JsonFactory jsonFactory,
com.google.api.client.http.HttpRequestInitializer httpRequestInitializer) {
super(
transport,
jsonFactory,
Builder.chooseEndpoint(transport),
DEFAULT_SERVICE_PATH,
httpRequestInitializer,
false);
setBatchPath(DEFAULT_BATCH_PATH);
}
/** Builds a new instance of {@link IAMCredentials}. */
@Override
public IAMCredentials build() {
return new IAMCredentials(this);
}
// Covariant overrides so configuration calls chain with the concrete Builder type.
@Override
public Builder setRootUrl(String rootUrl) {
return (Builder) super.setRootUrl(rootUrl);
}
@Override
public Builder setServicePath(String servicePath) {
return (Builder) super.setServicePath(servicePath);
}
@Override
public Builder setBatchPath(String batchPath) {
return (Builder) super.setBatchPath(batchPath);
}
@Override
public Builder setHttpRequestInitializer(com.google.api.client.http.HttpRequestInitializer httpRequestInitializer) {
return (Builder) super.setHttpRequestInitializer(httpRequestInitializer);
}
@Override
public Builder setApplicationName(String applicationName) {
return (Builder) super.setApplicationName(applicationName);
}
@Override
public Builder setSuppressPatternChecks(boolean suppressPatternChecks) {
return (Builder) super.setSuppressPatternChecks(suppressPatternChecks);
}
@Override
public Builder setSuppressRequiredParameterChecks(boolean suppressRequiredParameterChecks) {
return (Builder) super.setSuppressRequiredParameterChecks(suppressRequiredParameterChecks);
}
@Override
public Builder setSuppressAllChecks(boolean suppressAllChecks) {
return (Builder) super.setSuppressAllChecks(suppressAllChecks);
}
/**
 * Set the {@link IAMCredentialsRequestInitializer}.
 *
 * @since 1.12
 */
public Builder setIAMCredentialsRequestInitializer(
IAMCredentialsRequestInitializer iamcredentialsRequestInitializer) {
return (Builder) super.setGoogleClientRequestInitializer(iamcredentialsRequestInitializer);
}
@Override
public Builder setGoogleClientRequestInitializer(
com.google.api.client.googleapis.services.GoogleClientRequestInitializer googleClientRequestInitializer) {
return (Builder) super.setGoogleClientRequestInitializer(googleClientRequestInitializer);
}
}
}
|
googleapis/google-cloud-java | 37,797 | java-websecurityscanner/proto-google-cloud-websecurityscanner-v1beta/src/main/java/com/google/cloud/websecurityscanner/v1beta/UpdateScanConfigRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/websecurityscanner/v1beta/web_security_scanner.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.websecurityscanner.v1beta;
/**
*
*
* <pre>
* Request for the `UpdateScanConfigRequest` method.
* </pre>
*
* Protobuf type {@code google.cloud.websecurityscanner.v1beta.UpdateScanConfigRequest}
*/
public final class UpdateScanConfigRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.websecurityscanner.v1beta.UpdateScanConfigRequest)
UpdateScanConfigRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use UpdateScanConfigRequest.newBuilder() to construct.
private UpdateScanConfigRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private UpdateScanConfigRequest() {}
// Used by the protobuf runtime to create new instances reflectively.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new UpdateScanConfigRequest();
}
// Message descriptor, taken from the generated file descriptor.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.websecurityscanner.v1beta.WebSecurityScannerProto
.internal_static_google_cloud_websecurityscanner_v1beta_UpdateScanConfigRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.websecurityscanner.v1beta.WebSecurityScannerProto
.internal_static_google_cloud_websecurityscanner_v1beta_UpdateScanConfigRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.websecurityscanner.v1beta.UpdateScanConfigRequest.class,
com.google.cloud.websecurityscanner.v1beta.UpdateScanConfigRequest.Builder.class);
}
// Bit 0 tracks presence of scan_config; bit 1 tracks presence of update_mask.
private int bitField0_;
public static final int SCAN_CONFIG_FIELD_NUMBER = 2;
private com.google.cloud.websecurityscanner.v1beta.ScanConfig scanConfig_;
/**
 *
 *
 * <pre>
 * Required. The ScanConfig to be updated. The name field must be set to identify the
 * resource to be updated. The values of fields not covered by the mask
 * will be ignored.
 * </pre>
 *
 * <code>
 * .google.cloud.websecurityscanner.v1beta.ScanConfig scan_config = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 *
 * @return Whether the scanConfig field is set.
 */
@java.lang.Override
public boolean hasScanConfig() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
 *
 *
 * <pre>
 * Required. The ScanConfig to be updated. The name field must be set to identify the
 * resource to be updated. The values of fields not covered by the mask
 * will be ignored.
 * </pre>
 *
 * <code>
 * .google.cloud.websecurityscanner.v1beta.ScanConfig scan_config = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 *
 * @return The scanConfig, or the default instance if unset.
 */
@java.lang.Override
public com.google.cloud.websecurityscanner.v1beta.ScanConfig getScanConfig() {
return scanConfig_ == null
? com.google.cloud.websecurityscanner.v1beta.ScanConfig.getDefaultInstance()
: scanConfig_;
}
/**
 *
 *
 * <pre>
 * Required. The ScanConfig to be updated. The name field must be set to identify the
 * resource to be updated. The values of fields not covered by the mask
 * will be ignored.
 * </pre>
 *
 * <code>
 * .google.cloud.websecurityscanner.v1beta.ScanConfig scan_config = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
@java.lang.Override
public com.google.cloud.websecurityscanner.v1beta.ScanConfigOrBuilder getScanConfigOrBuilder() {
return scanConfig_ == null
? com.google.cloud.websecurityscanner.v1beta.ScanConfig.getDefaultInstance()
: scanConfig_;
}
public static final int UPDATE_MASK_FIELD_NUMBER = 3;
private com.google.protobuf.FieldMask updateMask_;
/**
 *
 *
 * <pre>
 * Required. The update mask applies to the resource. For the `FieldMask` definition,
 * see
 * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
 * </pre>
 *
 * <code>.google.protobuf.FieldMask update_mask = 3 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 *
 * @return Whether the updateMask field is set.
 */
@java.lang.Override
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
 *
 *
 * <pre>
 * Required. The update mask applies to the resource. For the `FieldMask` definition,
 * see
 * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
 * </pre>
 *
 * <code>.google.protobuf.FieldMask update_mask = 3 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 *
 * @return The updateMask, or the default instance if unset.
 */
@java.lang.Override
public com.google.protobuf.FieldMask getUpdateMask() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
/**
 *
 *
 * <pre>
 * Required. The update mask applies to the resource. For the `FieldMask` definition,
 * see
 * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
 * </pre>
 *
 * <code>.google.protobuf.FieldMask update_mask = 3 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
@java.lang.Override
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
// Memoized isInitialized() result: -1 = not yet computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
// Writes the set message fields (field numbers 2 and 3) followed by any unknown fields.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(2, getScanConfig());
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeMessage(3, getUpdateMask());
}
getUnknownFields().writeTo(output);
}
// Computes and memoizes the serialized size in bytes; must stay in sync with writeTo.
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getScanConfig());
}
if (((bitField0_ & 0x00000002) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getUpdateMask());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
// Value equality: compares field presence, field values, and unknown fields.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.websecurityscanner.v1beta.UpdateScanConfigRequest)) {
return super.equals(obj);
}
com.google.cloud.websecurityscanner.v1beta.UpdateScanConfigRequest other =
(com.google.cloud.websecurityscanner.v1beta.UpdateScanConfigRequest) obj;
if (hasScanConfig() != other.hasScanConfig()) return false;
if (hasScanConfig()) {
if (!getScanConfig().equals(other.getScanConfig())) return false;
}
if (hasUpdateMask() != other.hasUpdateMask()) return false;
if (hasUpdateMask()) {
if (!getUpdateMask().equals(other.getUpdateMask())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
// Memoized hash code mixing field numbers with field hashes; consistent with equals.
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasScanConfig()) {
hash = (37 * hash) + SCAN_CONFIG_FIELD_NUMBER;
hash = (53 * hash) + getScanConfig().hashCode();
}
if (hasUpdateMask()) {
hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
hash = (53 * hash) + getUpdateMask().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// Static parse helpers, one per supported input form, all delegating to PARSER.
public static com.google.cloud.websecurityscanner.v1beta.UpdateScanConfigRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.websecurityscanner.v1beta.UpdateScanConfigRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.websecurityscanner.v1beta.UpdateScanConfigRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.websecurityscanner.v1beta.UpdateScanConfigRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.websecurityscanner.v1beta.UpdateScanConfigRequest parseFrom(
byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.websecurityscanner.v1beta.UpdateScanConfigRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.websecurityscanner.v1beta.UpdateScanConfigRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.websecurityscanner.v1beta.UpdateScanConfigRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
// Delimited variants read a length-prefixed message from the stream.
public static com.google.cloud.websecurityscanner.v1beta.UpdateScanConfigRequest
parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.websecurityscanner.v1beta.UpdateScanConfigRequest
parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.websecurityscanner.v1beta.UpdateScanConfigRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.websecurityscanner.v1beta.UpdateScanConfigRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
// Builder factory methods.
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.websecurityscanner.v1beta.UpdateScanConfigRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request for the `UpdateScanConfigRequest` method.
* </pre>
*
* Protobuf type {@code google.cloud.websecurityscanner.v1beta.UpdateScanConfigRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.websecurityscanner.v1beta.UpdateScanConfigRequest)
com.google.cloud.websecurityscanner.v1beta.UpdateScanConfigRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.websecurityscanner.v1beta.WebSecurityScannerProto
.internal_static_google_cloud_websecurityscanner_v1beta_UpdateScanConfigRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.websecurityscanner.v1beta.WebSecurityScannerProto
.internal_static_google_cloud_websecurityscanner_v1beta_UpdateScanConfigRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.websecurityscanner.v1beta.UpdateScanConfigRequest.class,
com.google.cloud.websecurityscanner.v1beta.UpdateScanConfigRequest.Builder.class);
}
// Construct using
// com.google.cloud.websecurityscanner.v1beta.UpdateScanConfigRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// Eagerly creates nested field builders when the runtime requires it.
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getScanConfigFieldBuilder();
getUpdateMaskFieldBuilder();
}
}
// Resets the builder to the default (empty) state, disposing nested builders.
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
scanConfig_ = null;
if (scanConfigBuilder_ != null) {
scanConfigBuilder_.dispose();
scanConfigBuilder_ = null;
}
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.websecurityscanner.v1beta.WebSecurityScannerProto
.internal_static_google_cloud_websecurityscanner_v1beta_UpdateScanConfigRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.websecurityscanner.v1beta.UpdateScanConfigRequest
getDefaultInstanceForType() {
return com.google.cloud.websecurityscanner.v1beta.UpdateScanConfigRequest
.getDefaultInstance();
}
// Builds the message, throwing if any required invariants are violated.
@java.lang.Override
public com.google.cloud.websecurityscanner.v1beta.UpdateScanConfigRequest build() {
com.google.cloud.websecurityscanner.v1beta.UpdateScanConfigRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.websecurityscanner.v1beta.UpdateScanConfigRequest buildPartial() {
com.google.cloud.websecurityscanner.v1beta.UpdateScanConfigRequest result =
new com.google.cloud.websecurityscanner.v1beta.UpdateScanConfigRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
// Copies the set fields from the builder into the message and records their presence bits.
private void buildPartial0(
com.google.cloud.websecurityscanner.v1beta.UpdateScanConfigRequest result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.scanConfig_ = scanConfigBuilder_ == null ? scanConfig_ : scanConfigBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
to_bitField0_ |= 0x00000002;
}
result.bitField0_ |= to_bitField0_;
}
// Reflection-based field operations; all delegate to the generated superclass.
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
// Dispatches to the typed mergeFrom when the other message is the same generated type.
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.websecurityscanner.v1beta.UpdateScanConfigRequest) {
return mergeFrom(
(com.google.cloud.websecurityscanner.v1beta.UpdateScanConfigRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
// Merges each set field of `other` into this builder; a no-op for the default instance.
public Builder mergeFrom(
com.google.cloud.websecurityscanner.v1beta.UpdateScanConfigRequest other) {
if (other
== com.google.cloud.websecurityscanner.v1beta.UpdateScanConfigRequest
.getDefaultInstance()) return this;
if (other.hasScanConfig()) {
mergeScanConfig(other.getScanConfig());
}
if (other.hasUpdateMask()) {
mergeUpdateMask(other.getUpdateMask());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
// Parses fields from the wire: tag 18 = scan_config (field 2), tag 26 = update_mask (field 3);
// anything else is preserved as an unknown field.
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 18:
{
input.readMessage(getScanConfigFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 18
case 26:
{
input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 26
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
// Bit 0 tracks presence of scan_config; bit 1 tracks presence of update_mask.
private int bitField0_;
private com.google.cloud.websecurityscanner.v1beta.ScanConfig scanConfig_;
// Lazily-created nested builder for scan_config; when non-null it owns the field value.
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.websecurityscanner.v1beta.ScanConfig,
com.google.cloud.websecurityscanner.v1beta.ScanConfig.Builder,
com.google.cloud.websecurityscanner.v1beta.ScanConfigOrBuilder>
scanConfigBuilder_;
/**
 *
 *
 * <pre>
 * Required. The ScanConfig to be updated. The name field must be set to identify the
 * resource to be updated. The values of fields not covered by the mask
 * will be ignored.
 * </pre>
 *
 * <code>
 * .google.cloud.websecurityscanner.v1beta.ScanConfig scan_config = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 *
 * @return Whether the scanConfig field is set.
 */
public boolean hasScanConfig() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
 *
 *
 * <pre>
 * Required. The ScanConfig to be updated. The name field must be set to identify the
 * resource to be updated. The values of fields not covered by the mask
 * will be ignored.
 * </pre>
 *
 * <code>
 * .google.cloud.websecurityscanner.v1beta.ScanConfig scan_config = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 *
 * @return The scanConfig, or the default instance if unset.
 */
public com.google.cloud.websecurityscanner.v1beta.ScanConfig getScanConfig() {
if (scanConfigBuilder_ == null) {
return scanConfig_ == null
? com.google.cloud.websecurityscanner.v1beta.ScanConfig.getDefaultInstance()
: scanConfig_;
} else {
return scanConfigBuilder_.getMessage();
}
}
/**
 *
 *
 * <pre>
 * Required. The ScanConfig to be updated. The name field must be set to identify the
 * resource to be updated. The values of fields not covered by the mask
 * will be ignored.
 * </pre>
 *
 * <code>
 * .google.cloud.websecurityscanner.v1beta.ScanConfig scan_config = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
public Builder setScanConfig(com.google.cloud.websecurityscanner.v1beta.ScanConfig value) {
if (scanConfigBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
scanConfig_ = value;
} else {
scanConfigBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * Required. The ScanConfig to be updated. The name field must be set to identify the
 * resource to be updated. The values of fields not covered by the mask
 * will be ignored.
 * </pre>
 *
 * <code>
 * .google.cloud.websecurityscanner.v1beta.ScanConfig scan_config = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
public Builder setScanConfig(
com.google.cloud.websecurityscanner.v1beta.ScanConfig.Builder builderForValue) {
if (scanConfigBuilder_ == null) {
scanConfig_ = builderForValue.build();
} else {
scanConfigBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The ScanConfig to be updated. The name field must be set to identify the
* resource to be updated. The values of fields not covered by the mask
* will be ignored.
* </pre>
*
* <code>
* .google.cloud.websecurityscanner.v1beta.ScanConfig scan_config = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeScanConfig(com.google.cloud.websecurityscanner.v1beta.ScanConfig value) {
if (scanConfigBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)
&& scanConfig_ != null
&& scanConfig_
!= com.google.cloud.websecurityscanner.v1beta.ScanConfig.getDefaultInstance()) {
getScanConfigBuilder().mergeFrom(value);
} else {
scanConfig_ = value;
}
} else {
scanConfigBuilder_.mergeFrom(value);
}
if (scanConfig_ != null) {
bitField0_ |= 0x00000001;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. The ScanConfig to be updated. The name field must be set to identify the
* resource to be updated. The values of fields not covered by the mask
* will be ignored.
* </pre>
*
* <code>
* .google.cloud.websecurityscanner.v1beta.ScanConfig scan_config = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    public Builder clearScanConfig() {
      // Reset the has-bit, drop the stored message, and dispose any nested builder.
      bitField0_ = (bitField0_ & ~0x00000001);
      scanConfig_ = null;
      if (scanConfigBuilder_ != null) {
        scanConfigBuilder_.dispose();
        scanConfigBuilder_ = null;
      }
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* Required. The ScanConfig to be updated. The name field must be set to identify the
* resource to be updated. The values of fields not covered by the mask
* will be ignored.
* </pre>
*
* <code>
* .google.cloud.websecurityscanner.v1beta.ScanConfig scan_config = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    public com.google.cloud.websecurityscanner.v1beta.ScanConfig.Builder getScanConfigBuilder() {
      // Handing out a mutable builder counts as setting the field.
      bitField0_ |= 0x00000001;
      onChanged();
      return getScanConfigFieldBuilder().getBuilder();
    }
/**
*
*
* <pre>
* Required. The ScanConfig to be updated. The name field must be set to identify the
* resource to be updated. The values of fields not covered by the mask
* will be ignored.
* </pre>
*
* <code>
* .google.cloud.websecurityscanner.v1beta.ScanConfig scan_config = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    public com.google.cloud.websecurityscanner.v1beta.ScanConfigOrBuilder getScanConfigOrBuilder() {
      // Prefer the live builder view when one exists; otherwise fall back to the
      // stored message, substituting the default instance for null.
      if (scanConfigBuilder_ != null) {
        return scanConfigBuilder_.getMessageOrBuilder();
      } else {
        return scanConfig_ == null
            ? com.google.cloud.websecurityscanner.v1beta.ScanConfig.getDefaultInstance()
            : scanConfig_;
      }
    }
/**
*
*
* <pre>
* Required. The ScanConfig to be updated. The name field must be set to identify the
* resource to be updated. The values of fields not covered by the mask
* will be ignored.
* </pre>
*
* <code>
* .google.cloud.websecurityscanner.v1beta.ScanConfig scan_config = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    // Lazily creates the SingleFieldBuilderV3 for scan_config; once created, the
    // builder owns the field and the plain message reference is cleared.
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.websecurityscanner.v1beta.ScanConfig,
            com.google.cloud.websecurityscanner.v1beta.ScanConfig.Builder,
            com.google.cloud.websecurityscanner.v1beta.ScanConfigOrBuilder>
        getScanConfigFieldBuilder() {
      if (scanConfigBuilder_ == null) {
        scanConfigBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.websecurityscanner.v1beta.ScanConfig,
                com.google.cloud.websecurityscanner.v1beta.ScanConfig.Builder,
                com.google.cloud.websecurityscanner.v1beta.ScanConfigOrBuilder>(
                getScanConfig(), getParentForChildren(), isClean());
        scanConfig_ = null;
      }
      return scanConfigBuilder_;
    }
    // Storage for the update_mask field: the message itself plus a lazily created
    // nested builder (only one of the two is authoritative at a time).
    private com.google.protobuf.FieldMask updateMask_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>
        updateMaskBuilder_;
/**
*
*
* <pre>
* Required. The update mask applies to the resource. For the `FieldMask` definition,
* see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the updateMask field is set.
*/
    public boolean hasUpdateMask() {
      // Bit 0x2 of bitField0_ tracks whether update_mask has been set.
      return ((bitField0_ & 0x00000002) != 0);
    }
/**
*
*
* <pre>
* Required. The update mask applies to the resource. For the `FieldMask` definition,
* see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The updateMask.
*/
    public com.google.protobuf.FieldMask getUpdateMask() {
      // Read from the nested builder when present; otherwise return the stored
      // message, substituting the default instance for null.
      if (updateMaskBuilder_ == null) {
        return updateMask_ == null
            ? com.google.protobuf.FieldMask.getDefaultInstance()
            : updateMask_;
      } else {
        return updateMaskBuilder_.getMessage();
      }
    }
/**
*
*
* <pre>
* Required. The update mask applies to the resource. For the `FieldMask` definition,
* see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
      if (updateMaskBuilder_ == null) {
        // Protobuf message fields reject null; absence is expressed via clearUpdateMask().
        if (value == null) {
          throw new NullPointerException();
        }
        updateMask_ = value;
      } else {
        updateMaskBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000002; // mark update_mask as explicitly set
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* Required. The update mask applies to the resource. For the `FieldMask` definition,
* see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
      // Build eagerly; store directly or route through the nested field builder.
      if (updateMaskBuilder_ == null) {
        updateMask_ = builderForValue.build();
      } else {
        updateMaskBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000002; // mark update_mask as explicitly set
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* Required. The update mask applies to the resource. For the `FieldMask` definition,
* see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
      if (updateMaskBuilder_ == null) {
        // Merge into the existing mask only when a non-default one is already set;
        // otherwise adopt the incoming mask as-is.
        if (((bitField0_ & 0x00000002) != 0)
            && updateMask_ != null
            && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
          getUpdateMaskBuilder().mergeFrom(value);
        } else {
          updateMask_ = value;
        }
      } else {
        updateMaskBuilder_.mergeFrom(value);
      }
      // Only flag the field as set (and notify) when a mask is actually present.
      if (updateMask_ != null) {
        bitField0_ |= 0x00000002;
        onChanged();
      }
      return this;
    }
/**
*
*
* <pre>
* Required. The update mask applies to the resource. For the `FieldMask` definition,
* see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    public Builder clearUpdateMask() {
      // Reset the has-bit, drop the stored mask, and dispose any nested builder.
      bitField0_ = (bitField0_ & ~0x00000002);
      updateMask_ = null;
      if (updateMaskBuilder_ != null) {
        updateMaskBuilder_.dispose();
        updateMaskBuilder_ = null;
      }
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* Required. The update mask applies to the resource. For the `FieldMask` definition,
* see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
      // Handing out a mutable builder counts as setting the field.
      bitField0_ |= 0x00000002;
      onChanged();
      return getUpdateMaskFieldBuilder().getBuilder();
    }
/**
*
*
* <pre>
* Required. The update mask applies to the resource. For the `FieldMask` definition,
* see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
      // Prefer the live builder view when one exists; otherwise fall back to the
      // stored message, substituting the default instance for null.
      if (updateMaskBuilder_ != null) {
        return updateMaskBuilder_.getMessageOrBuilder();
      } else {
        return updateMask_ == null
            ? com.google.protobuf.FieldMask.getDefaultInstance()
            : updateMask_;
      }
    }
/**
*
*
* <pre>
* Required. The update mask applies to the resource. For the `FieldMask` definition,
* see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    // Lazily creates the SingleFieldBuilderV3 for update_mask; once created, the
    // builder owns the field and the plain message reference is cleared.
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>
        getUpdateMaskFieldBuilder() {
      if (updateMaskBuilder_ == null) {
        updateMaskBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.protobuf.FieldMask,
                com.google.protobuf.FieldMask.Builder,
                com.google.protobuf.FieldMaskOrBuilder>(
                getUpdateMask(), getParentForChildren(), isClean());
        updateMask_ = null;
      }
      return updateMaskBuilder_;
    }
    // Unknown-field handling is inherited unchanged from GeneratedMessageV3.Builder;
    // these overrides exist only to make the methods final.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.websecurityscanner.v1beta.UpdateScanConfigRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.websecurityscanner.v1beta.UpdateScanConfigRequest)
  // Singleton default instance, created once at class-initialization time.
  private static final com.google.cloud.websecurityscanner.v1beta.UpdateScanConfigRequest
      DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.websecurityscanner.v1beta.UpdateScanConfigRequest();
  }
  public static com.google.cloud.websecurityscanner.v1beta.UpdateScanConfigRequest
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Wire-format parser: delegates to Builder.mergeFrom and, on any failure, attaches
  // the partially parsed message to the thrown InvalidProtocolBufferException.
  private static final com.google.protobuf.Parser<UpdateScanConfigRequest> PARSER =
      new com.google.protobuf.AbstractParser<UpdateScanConfigRequest>() {
        @java.lang.Override
        public UpdateScanConfigRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  // Static and instance accessors for the shared parser and default instance.
  public static com.google.protobuf.Parser<UpdateScanConfigRequest> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<UpdateScanConfigRequest> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.websecurityscanner.v1beta.UpdateScanConfigRequest
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/druid | 37,774 | indexing-hadoop/src/main/java/org/apache/druid/indexer/HadoopIndexTask.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.indexer;
import com.fasterxml.jackson.annotation.JacksonInject;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import org.apache.commons.lang3.BooleanUtils;
import org.apache.druid.annotations.SuppressFBWarnings;
import org.apache.druid.indexer.granularity.ArbitraryGranularitySpec;
import org.apache.druid.indexer.granularity.GranularitySpec;
import org.apache.druid.indexer.path.SegmentMetadataPublisher;
import org.apache.druid.indexer.report.TaskReport;
import org.apache.druid.indexing.common.TaskLock;
import org.apache.druid.indexing.common.TaskLockType;
import org.apache.druid.indexing.common.TaskToolbox;
import org.apache.druid.indexing.common.actions.TaskActionClient;
import org.apache.druid.indexing.common.actions.TimeChunkLockAcquireAction;
import org.apache.druid.indexing.common.actions.TimeChunkLockTryAcquireAction;
import org.apache.druid.indexing.common.config.TaskConfig;
import org.apache.druid.indexing.common.task.AbstractTask;
import org.apache.druid.indexing.common.task.Tasks;
import org.apache.druid.indexing.overlord.IndexerMetadataStorageCoordinator;
import org.apache.druid.java.util.common.JodaUtils;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.java.util.common.granularity.Granularity;
import org.apache.druid.java.util.common.logger.Logger;
import org.apache.druid.segment.incremental.RowIngestionMeters;
import org.apache.druid.segment.realtime.ChatHandler;
import org.apache.druid.segment.realtime.ChatHandlerProvider;
import org.apache.druid.server.security.AuthorizationUtils;
import org.apache.druid.server.security.AuthorizerMapper;
import org.apache.druid.server.security.ResourceAction;
import org.apache.druid.timeline.DataSegment;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.util.ToolRunner;
import org.joda.time.Interval;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import java.io.File;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
@SuppressFBWarnings({"NP_NONNULL_PARAM_VIOLATION", "NP_STORE_INTO_NONNULL_FIELD"})
public class HadoopIndexTask extends HadoopTask implements ChatHandler
{
  // Task-type identifier used for task IDs and (de)serialization dispatch.
  public static final String TYPE = "index_hadoop";
  // Input-source name used when building external-resource authorization actions.
  public static final String INPUT_SOURCE_TYPE = "hadoop";
  private static final Logger log = new Logger(HadoopIndexTask.class);
  // File (inside the task directory) where the launched MapReduce job id is recorded,
  // so the job can be killed on abnormal exit.
  private static final String HADOOP_JOB_ID_FILENAME = "mapReduceJobId.json";
  // Populated from the toolbox at the start of runTask(); null until then.
  private TaskConfig taskConfig = null;
  // Extracts the datasource name from the ingestion spec; used to derive the task id
  // before 'this' is available in the constructor's super() call.
  private static String getTheDataSource(HadoopIngestionSpec spec)
  {
    return spec.getDataSchema().getDataSource();
  }
  // Ingestion spec; serialized explicitly via getSpec() under the "spec" property.
  @JsonIgnore
  private HadoopIngestionSpec spec;
  // Optional classpath prefix forwarded to the forked Hadoop job.
  @JsonIgnore
  private final String classpathPrefix;
  @JsonIgnore
  private final ObjectMapper jsonMapper;
  @JsonIgnore
  private final AuthorizerMapper authorizerMapper;
  @JsonIgnore
  private final HadoopTaskConfig hadoopTaskConfig;
  // Absent when no ChatHandlerProvider was injected; rowStats endpoint still works.
  @JsonIgnore
  private final Optional<ChatHandlerProvider> chatHandlerProvider;
  // Live row-stat getters for the two Hadoop phases; null until each phase starts.
  @JsonIgnore
  private InnerProcessingStatsGetter determinePartitionsStatsGetter;
  @JsonIgnore
  private InnerProcessingStatsGetter buildSegmentsStatsGetter;
  // Current phase of the ingestion lifecycle, reported in completion reports.
  @JsonIgnore
  private IngestionState ingestionState;
  // Final statuses of the two inner Hadoop jobs; null until the phase completes.
  @JsonIgnore
  private HadoopDetermineConfigInnerProcessingStatus determineConfigStatus = null;
  @JsonIgnore
  private HadoopIndexGeneratorInnerProcessingStatus buildSegmentsStatus = null;
  // Error description included in the completion report on failure.
  @JsonIgnore
  private String errorMsg;
/**
* @param spec is used by the HadoopDruidIndexerJob to set up the appropriate parameters
* for creating Druid index segments. It may be modified.
* <p/>
* Here, we will ensure that the DbConnectorConfig field of the spec is set to null, such that the
* job does not push a list of published segments the database. Instead, we will use the method
* IndexGeneratorJob.getPublishedSegments() to simply return a list of the published
* segments, and let the indexing service report these segments to the database.
*/
  @JsonCreator
  public HadoopIndexTask(
      @JsonProperty("id") String id,
      @JsonProperty("spec") HadoopIngestionSpec spec,
      @JsonProperty("hadoopCoordinates") String hadoopCoordinates,
      @JsonProperty("hadoopDependencyCoordinates") List<String> hadoopDependencyCoordinates,
      @JsonProperty("classpathPrefix") String classpathPrefix,
      @JacksonInject ObjectMapper jsonMapper,
      @JsonProperty("context") Map<String, Object> context,
      @JacksonInject AuthorizerMapper authorizerMapper,
      @JacksonInject ChatHandlerProvider chatHandlerProvider,
      @JacksonInject HadoopTaskConfig hadoopTaskConfig
  )
  {
    super(
        AbstractTask.getOrMakeId(id, TYPE, getTheDataSource(spec)),
        getTheDataSource(spec),
        // hadoopDependencyCoordinates wins; otherwise fall back to the single
        // hadoopCoordinates value (or null to use the cluster defaults).
        hadoopDependencyCoordinates == null
        ? (hadoopCoordinates == null ? null : ImmutableList.of(hadoopCoordinates))
        : hadoopDependencyCoordinates,
        context,
        hadoopTaskConfig
    );
    this.authorizerMapper = authorizerMapper;
    this.chatHandlerProvider = Optional.fromNullable(chatHandlerProvider);
    this.spec = context == null ? spec : spec.withContext(context);

    // Some HadoopIngestionSpec stuff doesn't make sense in the context of the indexing service:
    // output path, working path, and metadata update spec are all managed by the
    // indexing service itself, so reject specs that try to set them.
    Preconditions.checkArgument(
        this.spec.getIOConfig().getSegmentOutputPath() == null,
        "segmentOutputPath must be absent"
    );
    Preconditions.checkArgument(this.spec.getTuningConfig().getWorkingPath() == null, "workingPath must be absent");
    Preconditions.checkArgument(
        this.spec.getIOConfig().getMetadataUpdateSpec() == null,
        "metadataUpdateSpec must be absent"
    );
    this.hadoopTaskConfig = hadoopTaskConfig;
    this.classpathPrefix = classpathPrefix;
    this.jsonMapper = Preconditions.checkNotNull(jsonMapper, "null ObjectMappper");
    this.ingestionState = IngestionState.NOT_STARTED;
  }
  /** Returns the task-type discriminator ("index_hadoop"). */
  @Override
  public String getType()
  {
    return TYPE;
  }

  /** Resource actions required to read from the "hadoop" external input source. */
  @Nonnull
  @JsonIgnore
  @Override
  public Set<ResourceAction> getInputSourceResources()
  {
    return Set.of(AuthorizationUtils.createExternalResourceReadAction(INPUT_SOURCE_TYPE));
  }
@Override
public boolean isReady(TaskActionClient taskActionClient) throws Exception
{
Iterable<Interval> intervals = spec.getDataSchema().getGranularitySpec().sortedBucketIntervals();
if (intervals.iterator().hasNext()) {
Interval interval = JodaUtils.umbrellaInterval(
JodaUtils.condenseIntervals(intervals)
);
final TaskLock lock = taskActionClient.submit(
new TimeChunkLockTryAcquireAction(
TaskLockType.EXCLUSIVE,
interval
)
);
if (lock == null) {
return false;
}
lock.assertNotRevoked();
return true;
} else {
return true;
}
}
  // Segment-lock-based APIs are unsupported: Hadoop tasks always use time-chunk locks.
  @Override
  public boolean requireLockExistingSegments()
  {
    throw new UnsupportedOperationException();
  }
  @Override
  public List<DataSegment> findSegmentsToLock(TaskActionClient taskActionClient, List<Interval> intervals)
  {
    throw new UnsupportedOperationException();
  }
  // Hadoop batch indexing always produces perfectly rolled-up segments.
  @Override
  public boolean isPerfectRollup()
  {
    return true;
  }
@Nullable
@Override
public Granularity getSegmentGranularity()
{
final GranularitySpec granularitySpec = spec.getDataSchema().getGranularitySpec();
if (granularitySpec instanceof ArbitraryGranularitySpec) {
return null;
} else {
return granularitySpec.getSegmentGranularity();
}
}
  /** The ingestion spec, serialized as the task's "spec" property. */
  @JsonProperty("spec")
  public HadoopIngestionSpec getSpec()
  {
    return spec;
  }

  // Exposed as JSON so the serialized task round-trips the superclass values.
  @Override
  @JsonProperty
  public List<String> getHadoopDependencyCoordinates()
  {
    return super.getHadoopDependencyCoordinates();
  }

  @JsonProperty
  @Override
  public String getClasspathPrefix()
  {
    return classpathPrefix;
  }
  // Absolute path handed to the forked Hadoop runners so they can record the MR job id.
  private String getHadoopJobIdFileName()
  {
    return getHadoopJobIdFile().getAbsolutePath();
  }

  // True once a Hadoop job has been launched and its id recorded.
  private boolean hadoopJobIdFileExists()
  {
    return getHadoopJobIdFile().exists();
  }

  private File getHadoopJobIdFile()
  {
    return new File(taskConfig.getTaskDir(getId()), HADOOP_JOB_ID_FILENAME);
  }
  @Override
  public TaskStatus runTask(TaskToolbox toolbox)
  {
    try {
      taskConfig = toolbox.getConfig();
      // Hadoop tasks are opt-in on modern clusters; fail fast with a completion
      // report when the cluster has not enabled them.
      if (!taskConfig.isAllowHadoopTaskExecution()) {
        errorMsg = StringUtils.format(
            "Hadoop tasks are deprecated and will be removed in a future release. "
            + "Currently, they are not allowed to run on this cluster. If you wish to run them despite deprecation, "
            + "please set [%s] to true.",
            TaskConfig.ALLOW_HADOOP_TASK_EXECUTION_KEY
        );
        log.error(errorMsg);
        toolbox.getTaskReportFileWriter().write(getId(), getTaskCompletionReports());
        return TaskStatus.failure(getId(), errorMsg);
      }
      log.warn("Running deprecated index_hadoop task [%s]. "
               + "Hadoop batch indexing is deprecated and will be removed in a future release. "
               + "Please plan your migration to one of Druid's supported indexing patterns.",
          getId()
      );
      // Register for the rowStats HTTP endpoint when a chat handler provider exists.
      if (chatHandlerProvider.isPresent()) {
        log.info("Found chat handler of class[%s]", chatHandlerProvider.get().getClass().getName());
        chatHandlerProvider.get().register(getId(), this, false);
      } else {
        log.warn("No chat handler detected");
      }
      return runInternal(toolbox);
    }
    catch (Exception e) {
      // Reflection into the foreign-classloader runner wraps the real failure in
      // InvocationTargetException; unwrap it so the report shows the true cause.
      Throwable effectiveException;
      if (e instanceof RuntimeException && e.getCause() instanceof InvocationTargetException) {
        InvocationTargetException ite = (InvocationTargetException) e.getCause();
        effectiveException = ite.getCause();
        log.error(effectiveException, "Got invocation target exception in run()");
      } else {
        effectiveException = e;
        log.error(e, "Encountered exception in run()");
      }
      errorMsg = Throwables.getStackTraceAsString(effectiveException);
      toolbox.getTaskReportFileWriter().write(getId(), getTaskCompletionReports());
      return TaskStatus.failure(
          getId(),
          errorMsg
      );
    }
    finally {
      if (chatHandlerProvider.isPresent()) {
        chatHandlerProvider.get().unregister(getId());
      }
    }
  }
  @SuppressWarnings("unchecked")
  private TaskStatus runInternal(TaskToolbox toolbox) throws Exception
  {
    boolean indexGeneratorJobAttempted = false;
    boolean indexGeneratorJobSuccess = false;
    HadoopIngestionSpec indexerSchema = null;
    try {
      // If the task exits abnormally, attempt to kill the underlying MR job.
      registerResourceCloserOnAbnormalExit(config -> killHadoopJob());
      String hadoopJobIdFile = getHadoopJobIdFileName();
      HadoopTask.logExtensionsConfig();
      // Isolated classloader carrying the Hadoop dependency jars; all inner
      // runners are loaded and invoked reflectively through it.
      final ClassLoader loader = buildClassLoader();
      boolean determineIntervals = spec.getDataSchema().getGranularitySpec().inputIntervals().isEmpty();
      HadoopIngestionSpec.updateSegmentListIfDatasourcePathSpecIsUsed(
          spec,
          jsonMapper,
          new OverlordActionBasedUsedSegmentsRetriever(toolbox)
      );

      // ---- Phase 1: determine partitions/config inside the foreign classloader ----
      Object determinePartitionsInnerProcessingRunner = HadoopTask.getForeignClassloaderObject(
          "org.apache.druid.indexer.HadoopIndexTask$HadoopDetermineConfigInnerProcessingRunner",
          loader
      );
      determinePartitionsStatsGetter = new InnerProcessingStatsGetter(determinePartitionsInnerProcessingRunner);

      // Arguments cross the classloader boundary as JSON strings.
      String[] determinePartitionsInput = new String[]{
          toolbox.getJsonMapper().writeValueAsString(spec),
          hadoopTaskConfig.getHadoopWorkingPath(),
          toolbox.getSegmentPusher().getPathForHadoop(),
          hadoopJobIdFile
      };

      final ClassLoader oldLoader = Thread.currentThread().getContextClassLoader();
      Class<?> determinePartitionsRunnerClass = determinePartitionsInnerProcessingRunner.getClass();
      Method determinePartitionsInnerProcessingRunTask = determinePartitionsRunnerClass.getMethod(
          "runTask",
          determinePartitionsInput.getClass()
      );
      try {
        Thread.currentThread().setContextClassLoader(loader);
        ingestionState = IngestionState.DETERMINE_PARTITIONS;

        final String determineConfigStatusString = (String) determinePartitionsInnerProcessingRunTask.invoke(
            determinePartitionsInnerProcessingRunner,
            new Object[]{determinePartitionsInput}
        );

        determineConfigStatus = toolbox
            .getJsonMapper()
            .readValue(determineConfigStatusString, HadoopDetermineConfigInnerProcessingStatus.class);

        indexerSchema = determineConfigStatus.getSchema();
        // A null schema signals phase-1 failure; surface the runner's error message.
        if (indexerSchema == null) {
          errorMsg = determineConfigStatus.getErrorMsg();
          toolbox.getTaskReportFileWriter().write(getId(), getTaskCompletionReports());
          return TaskStatus.failure(
              getId(),
              errorMsg
          );
        }
      }
      catch (Exception e) {
        throw new RuntimeException(e);
      }
      finally {
        Thread.currentThread().setContextClassLoader(oldLoader);
      }

      // We should have a lock from before we started running only if interval was specified
      String version;
      if (determineIntervals) {
        // Intervals were discovered in phase 1, so acquire the lock now.
        Interval interval = JodaUtils.umbrellaInterval(
            JodaUtils.condenseIntervals(
                indexerSchema.getDataSchema().getGranularitySpec().sortedBucketIntervals()
            )
        );
        final long lockTimeoutMs = getContextValue(Tasks.LOCK_TIMEOUT_KEY, Tasks.DEFAULT_LOCK_TIMEOUT_MILLIS);
        // Note: if lockTimeoutMs is larger than ServerConfig.maxIdleTime, the below line can incur http timeout error.
        final TaskLock lock = Preconditions.checkNotNull(
            toolbox.getTaskActionClient().submit(
                new TimeChunkLockAcquireAction(TaskLockType.EXCLUSIVE, interval, lockTimeoutMs)
            ),
            "Cannot acquire a lock for interval[%s]", interval
        );
        lock.assertNotRevoked();
        version = lock.getVersion();
      } else {
        Iterable<TaskLock> locks = AbstractTask.getTaskLocks(toolbox.getTaskActionClient());
        final TaskLock myLock = Iterables.getOnlyElement(locks);
        version = myLock.getVersion();
      }

      // An explicitly requested version may be used, but only if it sorts before
      // the lock's version (so it cannot overshadow later data).
      final String specVersion = indexerSchema.getTuningConfig().getVersion();
      if (indexerSchema.getTuningConfig().isUseExplicitVersion()) {
        if (specVersion.compareTo(version) < 0) {
          version = specVersion;
        } else {
          String errMsg =
              StringUtils.format(
                  "Spec version can not be greater than or equal to the lock version, Spec version: [%s] Lock version: [%s].",
                  specVersion,
                  version
              );
          log.error(errMsg);
          toolbox.getTaskReportFileWriter().write(getId(), null);
          return TaskStatus.failure(getId(), errMsg);
        }
      }

      log.info("Setting version to: %s", version);

      // ---- Phase 2: build segments inside the foreign classloader ----
      Object innerProcessingRunner = HadoopTask.getForeignClassloaderObject(
          "org.apache.druid.indexer.HadoopIndexTask$HadoopIndexGeneratorInnerProcessingRunner",
          loader
      );
      buildSegmentsStatsGetter = new InnerProcessingStatsGetter(innerProcessingRunner);

      String[] buildSegmentsInput = new String[]{
          toolbox.getJsonMapper().writeValueAsString(indexerSchema),
          version,
          hadoopJobIdFile
      };

      Class<?> buildSegmentsRunnerClass = innerProcessingRunner.getClass();
      Method innerProcessingRunTask = buildSegmentsRunnerClass.getMethod("runTask", buildSegmentsInput.getClass());

      try {
        Thread.currentThread().setContextClassLoader(loader);
        ingestionState = IngestionState.BUILD_SEGMENTS;
        indexGeneratorJobAttempted = true;
        final String jobStatusString = (String) innerProcessingRunTask.invoke(
            innerProcessingRunner,
            new Object[]{buildSegmentsInput}
        );

        buildSegmentsStatus = toolbox.getJsonMapper().readValue(
            jobStatusString,
            HadoopIndexGeneratorInnerProcessingStatus.class
        );

        List<DataSegmentAndIndexZipFilePath> dataSegmentAndIndexZipFilePaths = buildSegmentsStatus.getDataSegmentAndIndexZipFilePaths();
        if (dataSegmentAndIndexZipFilePaths != null) {
          indexGeneratorJobSuccess = true;
          // Move index zips to their final deep-storage locations, then publish.
          renameSegmentIndexFilesJob(
              toolbox.getJsonMapper().writeValueAsString(indexerSchema),
              toolbox.getJsonMapper().writeValueAsString(dataSegmentAndIndexZipFilePaths)
          );
          ArrayList<DataSegment> segments = new ArrayList<>(dataSegmentAndIndexZipFilePaths.stream()
                                                                                          .map(
                                                                                              DataSegmentAndIndexZipFilePath::getSegment)
                                                                                          .collect(Collectors.toList()));
          toolbox.publishSegments(segments);

          // Try to wait for segments to be loaded by the cluster if the tuning config specifies a non-zero value
          // for awaitSegmentAvailabilityTimeoutMillis
          if (spec.getTuningConfig().getAwaitSegmentAvailabilityTimeoutMillis() > 0) {
            ingestionState = IngestionState.SEGMENT_AVAILABILITY_WAIT;
            waitForSegmentAvailability(
                toolbox,
                segments,
                spec.getTuningConfig().getAwaitSegmentAvailabilityTimeoutMillis()
            );
          }

          ingestionState = IngestionState.COMPLETED;
          toolbox.getTaskReportFileWriter().write(getId(), getTaskCompletionReports());
          return TaskStatus.success(getId());
        } else {
          errorMsg = buildSegmentsStatus.getErrorMsg();
          toolbox.getTaskReportFileWriter().write(getId(), getTaskCompletionReports());
          return TaskStatus.failure(
              getId(),
              errorMsg
          );
        }
      }
      catch (Exception e) {
        throw new RuntimeException(e);
      }
      finally {
        Thread.currentThread().setContextClassLoader(oldLoader);
      }
    }
    finally {
      // Always clean up intermediate working data for the index generator job.
      indexerGeneratorCleanupJob(
          indexGeneratorJobAttempted,
          indexGeneratorJobSuccess,
          indexerSchema == null ? null : toolbox.getJsonMapper().writeValueAsString(indexerSchema)
      );
    }
  }
private void killHadoopJob()
{
// To avoid issue of kill command once the ingestion task is actually completed
if (hadoopJobIdFileExists() && !ingestionState.equals(IngestionState.COMPLETED)) {
final ClassLoader oldLoader = Thread.currentThread().getContextClassLoader();
String hadoopJobIdFile = getHadoopJobIdFileName();
try {
ClassLoader loader = HadoopTask.buildClassLoader(
getHadoopDependencyCoordinates(),
hadoopTaskConfig.getDefaultHadoopCoordinates()
);
Object killMRJobInnerProcessingRunner = HadoopTask.getForeignClassloaderObject(
"org.apache.druid.indexer.HadoopIndexTask$HadoopKillMRJobIdProcessingRunner",
loader
);
String[] buildKillJobInput = new String[]{hadoopJobIdFile};
Class<?> buildKillJobRunnerClass = killMRJobInnerProcessingRunner.getClass();
Method innerProcessingRunTask = buildKillJobRunnerClass.getMethod("runTask", buildKillJobInput.getClass());
Thread.currentThread().setContextClassLoader(loader);
final String[] killStatusString = (String[]) innerProcessingRunTask.invoke(
killMRJobInnerProcessingRunner,
new Object[]{buildKillJobInput}
);
log.info(StringUtils.format("Tried killing job: [%s], status: [%s]", killStatusString[0], killStatusString[1]));
}
catch (Exception e) {
throw new RuntimeException(e);
}
finally {
Thread.currentThread().setContextClassLoader(oldLoader);
}
}
}
/**
* Must be called only when the hadoopy classloader is the current classloader
*/
private void renameSegmentIndexFilesJob(
String hadoopIngestionSpecStr,
String dataSegmentAndIndexZipFilePathListStr
)
{
final ClassLoader loader = Thread.currentThread().getContextClassLoader();
try {
final Class<?> clazz = loader.loadClass(
"org.apache.druid.indexer.HadoopIndexTask$HadoopRenameSegmentIndexFilesRunner"
);
Object renameSegmentIndexFilesRunner = clazz.newInstance();
String[] renameSegmentIndexFilesJobInput = new String[]{
hadoopIngestionSpecStr,
dataSegmentAndIndexZipFilePathListStr
};
Class<?> buildRenameSegmentIndexFilesJobRunnerClass = renameSegmentIndexFilesRunner.getClass();
Method renameSegmentIndexFiles = buildRenameSegmentIndexFilesJobRunnerClass.getMethod(
"runTask",
renameSegmentIndexFilesJobInput.getClass()
);
renameSegmentIndexFiles.invoke(
renameSegmentIndexFilesRunner,
new Object[]{renameSegmentIndexFilesJobInput}
);
}
catch (Exception e) {
throw new RuntimeException(e);
}
}
  // Cleans up intermediate working data left by the index generator job. Runs inside
  // the Hadoop classloader; failures are logged but never propagated, since cleanup
  // must not mask the task's real outcome.
  private void indexerGeneratorCleanupJob(
      boolean indexGeneratorJobAttempted,
      boolean indexGeneratorJobSuccess,
      String hadoopIngestionSpecStr
  )
  {
    if (!indexGeneratorJobAttempted) {
      log.info("No need for cleanup as index generator job did not even run");
      return;
    }

    final ClassLoader oldLoader = Thread.currentThread().getContextClassLoader();
    try {
      ClassLoader loader = HadoopTask.buildClassLoader(
          getHadoopDependencyCoordinates(),
          hadoopTaskConfig.getDefaultHadoopCoordinates()
      );

      Object indexerGeneratorCleanupRunner = HadoopTask.getForeignClassloaderObject(
          "org.apache.druid.indexer.HadoopIndexTask$HadoopIndexerGeneratorCleanupRunner",
          loader
      );

      // Arguments cross the classloader boundary as a String array.
      String[] indexerGeneratorCleanupJobInput = new String[]{
          indexGeneratorJobSuccess ? "true" : "false",
          hadoopIngestionSpecStr,
      };

      Class<?> buildIndexerGeneratorCleanupRunnerClass = indexerGeneratorCleanupRunner.getClass();
      Method indexerGeneratorCleanup = buildIndexerGeneratorCleanupRunnerClass.getMethod(
          "runTask",
          indexerGeneratorCleanupJobInput.getClass()
      );

      Thread.currentThread().setContextClassLoader(loader);
      indexerGeneratorCleanup.invoke(
          indexerGeneratorCleanupRunner,
          new Object[]{indexerGeneratorCleanupJobInput}
      );
    }
    catch (Exception e) {
      // Deliberately best-effort: cleanup failure should not fail the task.
      log.warn(e, "Failed to cleanup after index generator job");
    }
    finally {
      Thread.currentThread().setContextClassLoader(oldLoader);
    }
  }
@GET
@Path("/rowStats")
@Produces(MediaType.APPLICATION_JSON)
public Response getRowStats(
@Context final HttpServletRequest req,
@QueryParam("windows") List<Integer> windows
)
{
AuthorizationUtils.verifyUnrestrictedAccessToDatasource(req, getDataSource(), authorizerMapper);
Map<String, Object> totalsMap = new HashMap<>();
if (determinePartitionsStatsGetter != null) {
totalsMap.put(RowIngestionMeters.DETERMINE_PARTITIONS, determinePartitionsStatsGetter.getTotalMetrics());
}
if (buildSegmentsStatsGetter != null) {
totalsMap.put(RowIngestionMeters.BUILD_SEGMENTS, buildSegmentsStatsGetter.getTotalMetrics());
}
return Response.ok(Map.of("totals", totalsMap)).build();
}
/**
 * Builds the completion report map published when the task finishes, from the
 * current ingestion state and any recorded error message. The two trailing
 * arguments of the shared report builder are not used by this task (null).
 */
private TaskReport.ReportMap getTaskCompletionReports()
{
  return buildIngestionStatsAndContextReport(ingestionState, errorMsg, null, null);
}
/**
 * Assembles final row stats for the completion report: metrics from the
 * determine-partitions phase and the build-segments phase, each included only
 * if that phase produced a status.
 *
 * @return map of phase name to its metrics map (may be empty, never null)
 */
@Override
protected Map<String, Object> getTaskCompletionRowStats()
{
  final Map<String, Object> rowStats = new HashMap<>();
  if (determineConfigStatus != null) {
    rowStats.put(RowIngestionMeters.DETERMINE_PARTITIONS, determineConfigStatus.getMetrics());
  }
  if (buildSegmentsStatus != null) {
    rowStats.put(RowIngestionMeters.BUILD_SEGMENTS, buildSegmentsStatus.getMetrics());
  }
  return rowStats;
}
public static class InnerProcessingStatsGetter implements TaskMetricsGetter
{
static final List<String> KEYS = ImmutableList.of(
TaskMetricsUtils.ROWS_PROCESSED,
TaskMetricsUtils.ROWS_PROCESSED_WITH_ERRORS,
TaskMetricsUtils.ROWS_THROWN_AWAY,
TaskMetricsUtils.ROWS_UNPARSEABLE
);
private final Method getStatsMethod;
private final Object innerProcessingRunner;
public InnerProcessingStatsGetter(
Object innerProcessingRunner
)
{
try {
Class<?> aClazz = innerProcessingRunner.getClass();
this.getStatsMethod = aClazz.getMethod("getStats");
this.innerProcessingRunner = innerProcessingRunner;
}
catch (NoSuchMethodException nsme) {
throw new RuntimeException(nsme);
}
}
@Override
public List<String> getKeys()
{
return KEYS;
}
@Nullable
@Override
public Map<String, Number> getTotalMetrics()
{
try {
Map<String, Object> statsMap = (Map<String, Object>) getStatsMethod.invoke(innerProcessingRunner);
if (statsMap == null) {
return null;
}
long curProcessed = (Long) statsMap.get(TaskMetricsUtils.ROWS_PROCESSED);
long curProcessedWithErrors = (Long) statsMap.get(TaskMetricsUtils.ROWS_PROCESSED_WITH_ERRORS);
long curThrownAway = (Long) statsMap.get(TaskMetricsUtils.ROWS_THROWN_AWAY);
long curUnparseable = (Long) statsMap.get(TaskMetricsUtils.ROWS_UNPARSEABLE);
return ImmutableMap.of(
TaskMetricsUtils.ROWS_PROCESSED, curProcessed,
TaskMetricsUtils.ROWS_PROCESSED_WITH_ERRORS, curProcessedWithErrors,
TaskMetricsUtils.ROWS_THROWN_AWAY, curThrownAway,
TaskMetricsUtils.ROWS_UNPARSEABLE, curUnparseable
);
}
catch (Exception e) {
log.error(e, "Got exception from getTotalMetrics()");
return null;
}
}
}
/**
 * Called indirectly in {@link HadoopIndexTask#run(TaskToolbox)}.
 *
 * Runs the Hadoop determine-configuration job inside the foreign classloader
 * and reports its outcome as serialized JSON.
 */
@SuppressWarnings("unused")
public static class HadoopDetermineConfigInnerProcessingRunner
{
  private HadoopDruidDetermineConfigurationJob job;

  /**
   * @param args positional arguments serialized by the parent: [0] ingestion
   *             spec JSON, [1] working path, [2] segment output path,
   *             [3] hadoop job-id file
   * @return serialized {@link HadoopDetermineConfigInnerProcessingStatus}
   */
  public String runTask(String[] args) throws Exception
  {
    final String schemaJson = args[0];
    final String workingPath = args[1];
    final String segmentOutputPath = args[2];
    final String hadoopJobIdFile = args[3];

    final HadoopIngestionSpec spec = HadoopDruidIndexerConfig.JSON_MAPPER
        .readValue(
            schemaJson,
            HadoopIngestionSpec.class
        );
    final HadoopDruidIndexerConfig config = HadoopDruidIndexerConfig.fromSpec(
        spec
            .withIOConfig(spec.getIOConfig().withSegmentOutputPath(segmentOutputPath))
            .withTuningConfig(spec.getTuningConfig().withWorkingPath(workingPath))
    );

    job = new HadoopDruidDetermineConfigurationJob(config);
    job.setHadoopJobIdFile(hadoopJobIdFile);

    log.info("Starting a hadoop determine configuration job...");
    final boolean succeeded = job.run();
    // On success the resolved schema is returned; on failure the job's own
    // error message is carried instead.
    final HadoopDetermineConfigInnerProcessingStatus status = succeeded
        ? new HadoopDetermineConfigInnerProcessingStatus(config.getSchema(), job.getStats(), null)
        : new HadoopDetermineConfigInnerProcessingStatus(null, job.getStats(), job.getErrorMessage());
    return HadoopDruidIndexerConfig.JSON_MAPPER.writeValueAsString(status);
  }

  /** @return the job's stats map, or null if no job has been created yet */
  public Map<String, Object> getStats()
  {
    return job == null ? null : job.getStats();
  }
}
/**
 * Runs the Hadoop index generator job inside the foreign classloader and
 * reports the outcome (published segment paths, stats, error message) as
 * serialized JSON.
 */
@SuppressWarnings("unused")
public static class HadoopIndexGeneratorInnerProcessingRunner
{
  private HadoopDruidIndexerJob job;

  /**
   * @param args positional arguments serialized by the parent: [0] ingestion
   *             spec JSON, [1] version string, [2] hadoop job-id file
   * @return serialized {@link HadoopIndexGeneratorInnerProcessingStatus}
   */
  public String runTask(String[] args) throws Exception
  {
    final String schema = args[0];
    String version = args[1];
    final String hadoopJobIdFile = args[2];

    final HadoopIngestionSpec theSchema = HadoopDruidIndexerConfig.JSON_MAPPER
        .readValue(
            schema,
            HadoopIngestionSpec.class
        );
    final HadoopDruidIndexerConfig config = HadoopDruidIndexerConfig.fromSpec(
        theSchema
            .withTuningConfig(theSchema.getTuningConfig().withVersion(version))
    );

    // SegmentMetadataPublisher is only needed when running standalone without the indexing service.
    // In that case, whatever runs the Hadoop Index Task must ensure IndexerMetadataStorageCoordinator
    // can be injected based on the configuration given in config.getSchema().getIOConfig().getMetadataUpdateSpec()
    final SegmentMetadataPublisher maybeHandler;
    if (config.isUpdaterJobSpecSet()) {
      maybeHandler = new SegmentMetadataPublisher(INJECTOR.getInstance(IndexerMetadataStorageCoordinator.class));
    } else {
      maybeHandler = null;
    }
    job = new HadoopDruidIndexerJob(config, maybeHandler);
    job.setHadoopJobIdFile(hadoopJobIdFile);

    log.info("Starting a hadoop index generator job...");
    try {
      if (job.run()) {
        // Success: report published segment/index-zip paths with the stats.
        return HadoopDruidIndexerConfig.JSON_MAPPER.writeValueAsString(
            new HadoopIndexGeneratorInnerProcessingStatus(
                job.getPublishedSegmentAndIndexZipFilePaths(),
                job.getStats(),
                null
            )
        );
      } else {
        // Failure without exception: report stats plus the job's error message.
        return HadoopDruidIndexerConfig.JSON_MAPPER.writeValueAsString(
            new HadoopIndexGeneratorInnerProcessingStatus(
                null,
                job.getStats(),
                job.getErrorMessage()
            )
        );
      }
    }
    catch (Exception e) {
      // Exception during the run: still report whatever stats were collected.
      log.error(e, "Encountered exception in HadoopIndexGeneratorInnerProcessing.");
      return HadoopDruidIndexerConfig.JSON_MAPPER.writeValueAsString(
          new HadoopIndexGeneratorInnerProcessingStatus(
              null,
              job.getStats(),
              e.getMessage()
          )
      );
    }
  }

  /** @return the job's stats map, or null if no job has been created yet */
  public Map<String, Object> getStats()
  {
    if (job == null) {
      return null;
    }
    return job.getStats();
  }
}
/**
 * Kills a running Hadoop MR job whose id was persisted to a file by a
 * previously launched job, by invoking {@code JobClient -kill <jobId>}.
 */
@SuppressWarnings("unused")
public static class HadoopKillMRJobIdProcessingRunner
{
  /**
   * @param args [0] = path of the file holding the serialized Hadoop job id
   * @return {jobId, "Success"|"Fail"}; jobId is null when none could be read
   */
  public String[] runTask(String[] args) throws Exception
  {
    File hadoopJobIdFile = new File(args[0]);
    String jobId = null;

    try {
      if (hadoopJobIdFile.exists()) {
        jobId = HadoopDruidIndexerConfig.JSON_MAPPER.readValue(hadoopJobIdFile, String.class);
      }
    }
    catch (Exception e) {
      // Fixed misspelled log message ("exeption" -> "exception").
      log.warn(e, "exception while reading hadoop job id from: [%s]", hadoopJobIdFile);
    }

    if (jobId != null) {
      // This call to JobHelper#authenticate will be transparent if already authenticated or using insecure Hadoop.
      JobHelper.authenticate();
      int res = ToolRunner.run(new JobClient(), new String[]{
          "-kill",
          jobId
      });
      return new String[]{jobId, (res == 0 ? "Success" : "Fail")};
    }
    // No job id could be read; report failure with a null id (unchanged behavior).
    return new String[]{jobId, "Fail"};
  }
}
/**
 * Renames segment index files according to a deserialized ingestion spec and a
 * list of segment/index-zip file paths, both passed as JSON strings.
 */
@SuppressWarnings("unused")
public static class HadoopRenameSegmentIndexFilesRunner
{
  // Jackson type token for the serialized path list; stateless, so shared as a constant.
  static final TypeReference<List<DataSegmentAndIndexZipFilePath>> LIST_DATA_SEGMENT_AND_INDEX_ZIP_FILE_PATH =
      new TypeReference<>() {};

  /**
   * @param args [0] = HadoopIngestionSpec JSON,
   *             [1] = JSON list of DataSegmentAndIndexZipFilePath
   * @throws IllegalArgumentException if fewer than two arguments are supplied
   * @throws Exception if deserialization or renaming fails
   */
  public void runTask(String[] args) throws Exception
  {
    if (args.length != 2) {
      log.warn("HadoopRenameSegmentIndexFilesRunner called with improper number of arguments");
    }
    if (args.length < 2) {
      // Previously execution fell through and died with an
      // ArrayIndexOutOfBoundsException; fail fast with a descriptive error.
      throw new IllegalArgumentException(
          "HadoopRenameSegmentIndexFilesRunner requires 2 arguments, got " + args.length
      );
    }
    String hadoopIngestionSpecStr = args[0];
    String dataSegmentAndIndexZipFilePathListStr = args[1];

    HadoopIngestionSpec indexerSchema;
    List<DataSegmentAndIndexZipFilePath> dataSegmentAndIndexZipFilePaths;
    try {
      indexerSchema = HadoopDruidIndexerConfig.JSON_MAPPER.readValue(
          hadoopIngestionSpecStr,
          HadoopIngestionSpec.class
      );
      dataSegmentAndIndexZipFilePaths = HadoopDruidIndexerConfig.JSON_MAPPER.readValue(
          dataSegmentAndIndexZipFilePathListStr,
          LIST_DATA_SEGMENT_AND_INDEX_ZIP_FILE_PATH
      );
    }
    catch (Exception e) {
      log.warn(
          e,
          "HadoopRenameSegmentIndexFilesRunner: Error occurred while trying to read input parameters into data objects"
      );
      throw e;
    }
    JobHelper.renameIndexFilesForSegments(
        indexerSchema,
        dataSegmentAndIndexZipFilePaths
    );
  }
}
/**
 * Deletes the intermediate working path left behind by the index generator
 * job, depending on whether that job succeeded.
 */
@SuppressWarnings("unused")
public static class HadoopIndexerGeneratorCleanupRunner
{
  // NOTE(review): this type token is not referenced anywhere in this class;
  // retained for parity with HadoopRenameSegmentIndexFilesRunner and to avoid
  // breaking any package-level use.
  TypeReference<List<DataSegmentAndIndexZipFilePath>> LIST_DATA_SEGMENT_AND_INDEX_ZIP_FILE_PATH =
      new TypeReference<>() {};

  /**
   * @param args [0] = "true"/"false" whether the generator job succeeded,
   *             [1] = HadoopIngestionSpec JSON
   * @throws IllegalArgumentException if fewer than two arguments are supplied
   * @throws Exception if the ingestion spec cannot be deserialized
   */
  public void runTask(String[] args) throws Exception
  {
    if (args.length != 2) {
      log.warn("HadoopIndexerGeneratorCleanupRunner called with improper number of arguments");
    }
    if (args.length < 2) {
      // Previously execution fell through and died with an
      // ArrayIndexOutOfBoundsException; fail fast with a descriptive error.
      throw new IllegalArgumentException(
          "HadoopIndexerGeneratorCleanupRunner requires 2 arguments, got " + args.length
      );
    }
    String indexGeneratorJobSucceededStr = args[0];
    String hadoopIngestionSpecStr = args[1];

    HadoopIngestionSpec indexerSchema;
    boolean indexGeneratorJobSucceeded;
    // Removed an unused local (dataSegmentAndIndexZipFilePaths) that was
    // declared but never assigned or read.
    try {
      indexerSchema = HadoopDruidIndexerConfig.JSON_MAPPER.readValue(
          hadoopIngestionSpecStr,
          HadoopIngestionSpec.class
      );
      indexGeneratorJobSucceeded = BooleanUtils.toBoolean(indexGeneratorJobSucceededStr);
    }
    catch (Exception e) {
      log.warn(
          e,
          "HadoopIndexerGeneratorCleanupRunner: Error occurred while trying to read input parameters into data objects"
      );
      throw e;
    }
    JobHelper.maybeDeleteIntermediatePath(
        indexGeneratorJobSucceeded,
        indexerSchema
    );
  }
}
/**
 * Serialized result of {@code HadoopIndexGeneratorInnerProcessingRunner#runTask}:
 * the published segment/index-zip paths on success (null on failure), the job's
 * metrics, and an error message when the job failed (null on success).
 */
public static class HadoopIndexGeneratorInnerProcessingStatus
{
  // Null when the generator job failed or threw.
  private final List<DataSegmentAndIndexZipFilePath> dataSegmentAndIndexZipFilePaths;
  private final Map<String, Object> metrics;
  // Null when the generator job succeeded.
  private final String errorMsg;

  @JsonCreator
  public HadoopIndexGeneratorInnerProcessingStatus(
      @JsonProperty("dataSegmentAndIndexZipFilePaths") List<DataSegmentAndIndexZipFilePath> dataSegmentAndIndexZipFilePaths,
      @JsonProperty("metrics") Map<String, Object> metrics,
      @JsonProperty("errorMsg") String errorMsg
  )
  {
    this.dataSegmentAndIndexZipFilePaths = dataSegmentAndIndexZipFilePaths;
    this.metrics = metrics;
    this.errorMsg = errorMsg;
  }

  @JsonProperty
  public List<DataSegmentAndIndexZipFilePath> getDataSegmentAndIndexZipFilePaths()
  {
    return dataSegmentAndIndexZipFilePaths;
  }

  @JsonProperty
  public Map<String, Object> getMetrics()
  {
    return metrics;
  }

  @JsonProperty
  public String getErrorMsg()
  {
    return errorMsg;
  }
}
/**
 * Serialized result of {@code HadoopDetermineConfigInnerProcessingRunner#runTask}:
 * the resolved ingestion schema on success (null on failure), the job's
 * metrics, and an error message when the job failed (null on success).
 */
public static class HadoopDetermineConfigInnerProcessingStatus
{
  // Null when the determine-config job failed.
  private final HadoopIngestionSpec schema;
  private final Map<String, Object> metrics;
  // Null when the determine-config job succeeded.
  private final String errorMsg;

  @JsonCreator
  public HadoopDetermineConfigInnerProcessingStatus(
      @JsonProperty("schema") HadoopIngestionSpec schema,
      @JsonProperty("metrics") Map<String, Object> metrics,
      @JsonProperty("errorMsg") String errorMsg
  )
  {
    this.schema = schema;
    this.metrics = metrics;
    this.errorMsg = errorMsg;
  }

  @JsonProperty
  public HadoopIngestionSpec getSchema()
  {
    return schema;
  }

  @JsonProperty
  public Map<String, Object> getMetrics()
  {
    return metrics;
  }

  @JsonProperty
  public String getErrorMsg()
  {
    return errorMsg;
  }
}
}
|
apache/jmeter | 37,780 | src/protocol/mail/src/main/java/org/apache/jmeter/protocol/smtp/sampler/gui/SmtpPanel.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to you under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jmeter.protocol.smtp.sampler.gui;
import java.awt.BorderLayout;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import java.awt.event.ActionEvent;
import java.io.File;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import javax.swing.BorderFactory;
import javax.swing.JButton;
import javax.swing.JCheckBox;
import javax.swing.JFileChooser;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JPasswordField;
import javax.swing.JTextArea;
import javax.swing.JTextField;
import javax.swing.event.ChangeEvent;
import org.apache.jmeter.config.Argument;
import org.apache.jmeter.gui.util.HorizontalPanel;
import org.apache.jmeter.gui.util.VerticalPanel;
import org.apache.jmeter.protocol.smtp.sampler.SmtpSampler;
import org.apache.jmeter.testelement.property.CollectionProperty;
import org.apache.jmeter.util.JMeterUtils;
/**
* Class to build gui-components for SMTP-sampler. Getter-methods serve the
* input-data to the sampler-object, which provides them to the
* SendMailCommand-object.
*/
public class SmtpPanel extends JPanel {
private static final long serialVersionUID = 2L;

// GUI components and panel state
private JTextField tfMailFrom;
private JTextField tfMailReplyTo;
private JButton browseButton;
private JButton emlBrowseButton;
private JCheckBox cbUseAuth;
private JTextField tfMailServer;
private JTextField tfMailServerPort;
private JTextField tfMailServerTimeout;
private JTextField tfMailServerConnectionTimeout;
private JTextField tfMailTo;
private JTextField tfMailToCC;
private JTextField tfMailToBCC;
private JTextField tfAttachment;
private JTextField tfEmlMessage;
private JTextArea taMessage;
private JCheckBox cbPlainBody;
private JFileChooser attachmentFileChooser;
private JFileChooser emlFileChooser;
// NOTE(review): declared as JTextField but initialized as a JPasswordField in initComponents()
private JTextField tfAuthPassword;
private JTextField tfAuthUsername;
private JTextField tfSubject;
private JCheckBox cbSuppressSubject;
private JCheckBox cbIncludeTimestamp;
private JCheckBox cbMessageSizeStats;
private JCheckBox cbEnableDebug;
private JCheckBox cbUseEmlMessage;
private JPanel headerFieldsPanel;
private JButton addHeaderFieldButton;
private JLabel headerFieldName;
private JLabel headerFieldValue;
// Maps each header-name text field to its header-value text field
private final Map<JTextField, JTextField> headerFields = new HashMap<>();
// Maps each header row's remove button to that row's header-name text field
private final Map<JButton,JTextField> removeButtons = new HashMap<>();
// Next grid row index used when adding header rows to headerFieldsPanel
private int headerGridY = 0;
private SecuritySettingsPanel securitySettingsPanel;
/**
 * Creates a new SmtpPanel and builds all of its GUI components
 * (standard constructor; delegates to {@link #initComponents()}).
 */
public SmtpPanel() {
    initComponents();
}
/**
 * Returns the sender address for the e-mail from the text field.
 *
 * @return Sender
 */
public String getMailFrom() {
    return tfMailFrom.getText();
}

/**
 * Returns the receiver in field "to" from the text field.
 *
 * @return Receiver "to"
 */
public String getReceiverTo() {
    return tfMailTo.getText();
}

/**
 * Returns the receiver in field "cc" from the text field.
 *
 * @return Receiver "cc"
 */
public String getReceiverCC() {
    return tfMailToCC.getText();
}

/**
 * Returns the receiver in field "bcc" from the text field.
 *
 * @return Receiver "bcc"
 */
public String getReceiverBCC() {
    return tfMailToBCC.getText();
}

/**
 * Returns the message body, i.e. the main MIME part of the message (from the text area).
 *
 * @return Message body
 */
public String getBody() {
    return taMessage.getText();
}

/**
 * Sets the message body, i.e. the main MIME part of the message, in the text area.
 *
 * @param messageBodyText
 *            Message body
 */
public void setBody(String messageBodyText) {
    taMessage.setText(messageBodyText);
}

/**
 * Sets the sender address of the e-mail in the text field.
 *
 * @param mailFrom
 *            Sender
 */
public void setMailFrom(String mailFrom) {
    tfMailFrom.setText(mailFrom);
}

/**
 * Sets the receiver in text field "to".
 *
 * @param mailTo
 *            Receiver "to"
 */
public void setReceiverTo(String mailTo) {
    tfMailTo.setText(mailTo);
}

/**
 * Sets the receiver in text field "cc".
 *
 * @param mailToCC
 *            Receiver "cc"
 */
public void setReceiverCC(String mailToCC) {
    tfMailToCC.setText(mailToCC);
}

/**
 * Sets the receiver in text field "bcc".
 *
 * @param mailToBCC
 *            Receiver "bcc"
 */
public void setReceiverBCC(String mailToBCC) {
    tfMailToBCC.setText(mailToBCC);
}

/**
 * Returns the path of the file(s) to be attached to the e-mail from the text field.
 *
 * @return File to attach
 */
public String getAttachments() {
    return tfAttachment.getText();
}

/**
 * Sets the path of the file to be attached to the e-mail in the text field.
 *
 * @param attachments
 *            File to attach
 */
public void setAttachments(String attachments) {
    tfAttachment.setText(attachments);
}
/**
 * Returns the port of the mail server (standard 25 for SMTP/SMTP with
 * StartTLS, 465 for SSL) from the text field.
 *
 * @return Mail server port
 */
public String getPort() {
    return tfMailServerPort.getText();
}

/**
 * Sets the port of the mail server.
 *
 * @param port
 *            Mail server port
 */
public void setPort(String port) {
    tfMailServerPort.setText(port);
}

/**
 * Returns the mail server used to send the message (from the text field).
 *
 * @return FQDN or IP of the mail server
 */
public String getServer() {
    return tfMailServer.getText();
}

/**
 * Sets the mail server used to send the message in the text field.
 *
 * @param server
 *            FQDN or IP of the mail server
 */
public void setServer(String server) {
    tfMailServer.setText(server);
}

/**
 * Returns the response timeout for the SMTP connection from the text field.
 *
 * @return SMTP timeout
 */
public String getTimeout() {
    return tfMailServerTimeout.getText();
}

/**
 * Sets the response timeout (ms) for the SMTP connection.
 *
 * @param timeout
 *            SMTP timeout (ms)
 */
public void setTimeout(String timeout) {
    tfMailServerTimeout.setText(timeout);
}

/**
 * Returns the connection timeout for the SMTP connection from the text field.
 *
 * @return SMTP connection timeout
 */
public String getConnectionTimeout() {
    return tfMailServerConnectionTimeout.getText();
}

/**
 * Sets the connection timeout (ms) for the SMTP connection.
 *
 * @param connectionTimeout
 *            SMTP connection timeout (ms)
 */
public void setConnectionTimeout(String connectionTimeout) {
    tfMailServerConnectionTimeout.setText(connectionTimeout);
}

/**
 * Returns the subject of the e-mail from the text field.
 *
 * @return Subject of the e-mail
 */
public String getSubject() {
    return tfSubject.getText();
}

/**
 * Sets the subject of the e-mail in the text field.
 *
 * @param subject
 *            Subject of the e-mail
 */
public void setSubject(String subject) {
    tfSubject.setText(subject);
}
/**
 * Returns true if the subject header should be suppressed.
 *
 * @return true if the subject header should be suppressed
 */
public boolean isSuppressSubject() {
    return cbSuppressSubject.isSelected();
}

/**
 * Sets the property that defines whether the subject header should be suppressed.
 *
 * @param emptySubject flag whether the subject header should be suppressed
 */
public void setSuppressSubject(boolean emptySubject) {
    cbSuppressSubject.setSelected(emptySubject);
}

/**
 * Returns true if the message body should be plain (i.e. not multipart/mixed).
 *
 * @return true if using a plain message body (i.e. not multipart/mixed)
 */
public boolean isPlainBody() {
    return cbPlainBody.isSelected();
}

/**
 * Sets the property that defines whether the body should be plain (i.e. not multipart/mixed).
 *
 * @param plainBody whether to use a plain body (i.e. not multipart/mixed)
 */
public void setPlainBody(boolean plainBody) {
    cbPlainBody.setSelected(plainBody);
}

/**
 * Returns whether the mail server needs authentication (checkbox).
 *
 * @return true if authentication is used
 */
public boolean isUseAuth() {
    return cbUseAuth.isSelected();
}

/**
 * Sets whether the mail server needs auth, and toggles the editability of the
 * username and password fields to match.
 *
 * @param selected flag whether the mail server needs auth
 */
public void setUseAuth(boolean selected){
    cbUseAuth.setSelected(selected);
    tfAuthPassword.setEditable(selected); // ensure correctly set on initial display
    tfAuthUsername.setEditable(selected); // ensure correctly set on initial display
}

/**
 * Returns whether debugging output is enabled (checkbox).
 *
 * @return true if debugging output is enabled
 */
public boolean isEnableDebug() {
    return cbEnableDebug.isSelected();
}

/**
 * Enables or disables debugging output (checkbox).
 *
 * @param selected flag whether debugging output should be enabled
 */
public void setEnableDebug(boolean selected){
    cbEnableDebug.setSelected(selected);
}

/**
 * Returns whether an .eml message is sent instead of the content of the
 * message text area.
 *
 * @return true if .eml is sent, false if the text area content is sent in
 *         the e-mail
 */
public boolean isUseEmlMessage() {
    return cbUseEmlMessage.isSelected();
}

/**
 * Sets the use of an .eml message instead of the content of the message
 * text area.
 *
 * @param useEmlMessage
 *            Use eml message
 */
public void setUseEmlMessage(boolean useEmlMessage) {
    cbUseEmlMessage.setSelected(useEmlMessage);
}

/**
 * Returns the path to the eml message to be sent.
 *
 * @return path to the eml message to be sent
 */
public String getEmlMessage() {
    return tfEmlMessage.getText();
}

/**
 * Sets the path to the eml message to be sent.
 *
 * @param emlMessage
 *            path to the eml message to be sent
 */
public void setEmlMessage(String emlMessage) {
    tfEmlMessage.setText(emlMessage);
}

/**
 * Returns whether the current timestamp is included in the subject (checkbox).
 *
 * @return true if the current timestamp is included in the subject
 */
public boolean isIncludeTimestamp() {
    return cbIncludeTimestamp.isSelected();
}

/**
 * Sets whether a timestamp is included in the message subject (checkbox).
 *
 * @param includeTimestamp
 *            Should the timestamp be included in the subject?
 */
public void setIncludeTimestamp(boolean includeTimestamp) {
    cbIncludeTimestamp.setSelected(includeTimestamp);
}

/**
 * Returns whether message size statistics are processed. The output of the
 * processing will be included in the sample result. (checkbox)
 *
 * @return true if the message size will be calculated
 */
public boolean isMessageSizeStatistics() {
    return cbMessageSizeStats.isSelected();
}

/**
 * Sets whether the message size should be calculated and included in the
 * sample result (checkbox).
 *
 * @param val
 *            Should the message size be calculated?
 */
public void setMessageSizeStatistic(boolean val) {
    cbMessageSizeStats.setSelected(val);
}

/**
 * Returns the password used for SMTP authentication.
 * NOTE(review): tfAuthPassword is created as a JPasswordField in
 * initComponents(), so getText() exposes the password as a plain String —
 * presumably acceptable for a test tool; confirm.
 *
 * @return the auth password
 */
public String getPassword() {
    return tfAuthPassword.getText();
}

/**
 * Sets the password used for SMTP authentication.
 *
 * @param authPassword the auth password
 */
public void setPassword(String authPassword) {
    tfAuthPassword.setText(authPassword);
}

/**
 * Returns the username used for SMTP authentication.
 *
 * @return the auth username
 */
public String getUsername() {
    return tfAuthUsername.getText();
}

/**
 * Sets the username used for SMTP authentication.
 *
 * @param username the auth username
 */
public void setUsername(String username) {
    tfAuthUsername.setText(username);
}
/**
 * Collects the configured custom header rows into a CollectionProperty named
 * {@code SmtpSampler.HEADER_FIELDS}; each row becomes an {@link Argument}
 * holding the header name and value.
 *
 * @return property holding all header name/value pairs
 */
public CollectionProperty getHeaderFields() {
    CollectionProperty result = new CollectionProperty();
    result.setName(SmtpSampler.HEADER_FIELDS);
    for (Map.Entry<JTextField, JTextField> header : headerFields.entrySet()) {
        String name = header.getKey().getText();
        String value = header.getValue().getText();
        Argument argument = new Argument(name, value);
        result.addItem(argument);
    }
    return result;
}

/**
 * Rebuilds the header-field rows of the GUI from the given property.
 *
 * @param fields property holding header name/value pairs
 */
public void setHeaderFields(CollectionProperty fields) {
    clearHeaderFields();
    for (int i = 0; i < fields.size(); i++) {
        Argument argument = (Argument) fields.get(i).getObjectValue();
        String name = argument.getName();
        // addHeaderActionPerformed adds a fresh row and returns its remove
        // button, which maps to the row's name field in removeButtons.
        JButton removeButton = addHeaderActionPerformed(null);
        JTextField nameTF = removeButtons.get(removeButton);
        nameTF.setText(name);
        JTextField valueTF = headerFields.get(nameTF);
        valueTF.setText(argument.getValue());
    }
    validate();
}

/**
 * Returns the reply-to address of the e-mail from the text field.
 *
 * @return Reply-to address
 */
public String getMailReplyTo() {
    return tfMailReplyTo.getText();
}

/**
 * Sets the reply-to address of the e-mail in the text field.
 *
 * @param replyTo Reply-to address
 */
public void setMailReplyTo(String replyTo) {
    tfMailReplyTo.setText(replyTo);
}
/**
 * Main method of the class; builds all GUI components for the SMTP sampler.
 */
private void initComponents() {
    // Labels for all input fields (texts are externalized resource strings).
    JLabel jlAddressReplyTo = new JLabel(JMeterUtils.getResString("smtp_replyto")); // $NON-NLS-1$
    JLabel jlAddressFrom = new JLabel(JMeterUtils.getResString("smtp_from")); // $NON-NLS-1$
    JLabel jlAddressTo = new JLabel(JMeterUtils.getResString("smtp_to")); // $NON-NLS-1$
    JLabel jlAddressToCC = new JLabel(JMeterUtils.getResString("smtp_cc")); // $NON-NLS-1$
    JLabel jlAddressToBCC = new JLabel(JMeterUtils.getResString("smtp_bcc")); // $NON-NLS-1$
    JLabel jlMailServerPort = new JLabel(JMeterUtils.getResString("smtp_server_port")); // $NON-NLS-1$
    JLabel jlMailServer = new JLabel(JMeterUtils.getResString("smtp_server")); // $NON-NLS-1$
    JLabel jlMailServerTimeout = new JLabel(JMeterUtils.getResString("smtp_server_timeout")); // $NON-NLS-1$
    JLabel jlMailServerConnectionTimeout = new JLabel(JMeterUtils.getResString("smtp_server_connection_timeout")); // $NON-NLS-1$
    JLabel jlAttachFile = new JLabel(JMeterUtils.getResString("smtp_attach_file")); // $NON-NLS-1$
    JLabel jlDutPortStandard = new JLabel(JMeterUtils.getResString("smtp_default_port")); // $NON-NLS-1$
    JLabel jlUsername = new JLabel(JMeterUtils.getResString("smtp_username")); // $NON-NLS-1$
    JLabel jlPassword = new JLabel(JMeterUtils.getResString("smtp_password")); // $NON-NLS-1$
    JLabel jlSubject = new JLabel(JMeterUtils.getResString("smtp_subject")); // $NON-NLS-1$
    JLabel jlMessage = new JLabel(JMeterUtils.getResString("smtp_message")); // $NON-NLS-1$

    // Input fields.
    tfMailServer = new JTextField(30);
    tfMailServerPort = new JTextField(6);
    tfMailServerTimeout = new JTextField(6);
    tfMailServerConnectionTimeout = new JTextField(6);
    tfMailFrom = new JTextField(25);
    tfMailReplyTo = new JTextField(25);
    tfMailTo = new JTextField(25);
    tfMailToCC = new JTextField(25);
    tfMailToBCC = new JTextField(25);
    tfAuthUsername = new JTextField(20);
    tfAuthPassword = new JPasswordField(20);
    tfSubject = new JTextField(20);
    tfAttachment = new JTextField(30);
    tfEmlMessage = new JTextField(30);
    taMessage = new JTextArea(5, 20);

    // Option checkboxes.
    cbPlainBody = new JCheckBox(JMeterUtils.getResString("smtp_plainbody")); // $NON-NLS-1$
    cbSuppressSubject = new JCheckBox(JMeterUtils.getResString("smtp_suppresssubj")); // $NON-NLS-1$
    cbSuppressSubject.addChangeListener(this::emptySubjectActionPerformed);
    cbUseAuth = new JCheckBox(JMeterUtils.getResString("smtp_useauth")); // $NON-NLS-1$
    cbIncludeTimestamp = new JCheckBox(JMeterUtils.getResString("smtp_timestamp")); // $NON-NLS-1$
    cbMessageSizeStats = new JCheckBox(JMeterUtils.getResString("smtp_messagesize")); // $NON-NLS-1$
    cbEnableDebug = new JCheckBox(JMeterUtils.getResString("smtp_enabledebug")); // $NON-NLS-1$
    cbUseEmlMessage = new JCheckBox(JMeterUtils.getResString("smtp_eml")); // $NON-NLS-1$

    // File choosers and their browse buttons.
    attachmentFileChooser = new JFileChooser();
    emlFileChooser = new JFileChooser();
    browseButton = new JButton(JMeterUtils.getResString("browse")); // $NON-NLS-1$
    emlBrowseButton = new JButton(JMeterUtils.getResString("browse")); // $NON-NLS-1$
    attachmentFileChooser
            .addActionListener(this::attachmentFolderFileChooserActionPerformed);
    emlFileChooser.addActionListener(this::emlFileChooserActionPerformed);

    setLayout(new GridBagLayout());

    // Shared constraints: "Main" is used for the top-level panels, the other
    // for components inside each panel.
    GridBagConstraints gridBagConstraintsMain = new GridBagConstraints();
    gridBagConstraintsMain.fill = GridBagConstraints.HORIZONTAL;
    gridBagConstraintsMain.anchor = GridBagConstraints.WEST;
    gridBagConstraintsMain.weightx = 0.5;

    GridBagConstraints gridBagConstraints = new GridBagConstraints();
    gridBagConstraints.insets = new java.awt.Insets(2, 2, 2, 2);
    gridBagConstraints.fill = GridBagConstraints.NONE;
    gridBagConstraints.anchor = GridBagConstraints.WEST;
    gridBagConstraints.weightx = 0.5;

    /*
     * Server Settings
     */
    JPanel panelServerSettings = new VerticalPanel();
    panelServerSettings.setBorder(BorderFactory.createTitledBorder(
            JMeterUtils.getResString("smtp_server_settings"))); // $NON-NLS-1$

    JPanel panelMailServer = new JPanel(new BorderLayout(5, 0));
    panelMailServer.add(jlMailServer, BorderLayout.WEST);
    panelMailServer.add(tfMailServer, BorderLayout.CENTER);

    JPanel panelMailServerPort = new JPanel(new BorderLayout(5, 0));
    panelMailServerPort.add(jlMailServerPort, BorderLayout.WEST);
    panelMailServerPort.add(tfMailServerPort, BorderLayout.CENTER);
    panelMailServerPort.add(jlDutPortStandard, BorderLayout.EAST);

    panelServerSettings.add(panelMailServer, BorderLayout.CENTER);
    panelServerSettings.add(panelMailServerPort, BorderLayout.SOUTH);

    JPanel panelServerTimeoutsSettings = new VerticalPanel();
    panelServerTimeoutsSettings.setBorder(BorderFactory.createTitledBorder(
            JMeterUtils.getResString("smtp_server_timeouts_settings"))); // $NON-NLS-1$

    JPanel panelMailServerConnectionTimeout = new JPanel(new BorderLayout(5, 0));
    panelMailServerConnectionTimeout.add(jlMailServerConnectionTimeout, BorderLayout.WEST);
    panelMailServerConnectionTimeout.add(tfMailServerConnectionTimeout, BorderLayout.CENTER);

    JPanel panelMailServerTimeout = new JPanel(new BorderLayout(5, 0));
    panelMailServerTimeout.add(jlMailServerTimeout, BorderLayout.WEST);
    panelMailServerTimeout.add(tfMailServerTimeout, BorderLayout.CENTER);

    panelServerTimeoutsSettings.add(panelMailServerConnectionTimeout, BorderLayout.CENTER);
    panelServerTimeoutsSettings.add(panelMailServerTimeout, BorderLayout.SOUTH);

    JPanel panelServerConfig = new HorizontalPanel();
    panelServerConfig.add(panelServerSettings, BorderLayout.CENTER);
    panelServerConfig.add(panelServerTimeoutsSettings, BorderLayout.EAST);

    gridBagConstraintsMain.gridx = 0;
    gridBagConstraintsMain.gridy = 0;
    add(panelServerConfig, gridBagConstraintsMain);

    /*
     * E-Mail Settings
     */
    JPanel panelMailSettings = new JPanel(new GridBagLayout());
    panelMailSettings.setBorder(BorderFactory.createTitledBorder(
            JMeterUtils.getResString("smtp_mail_settings"))); // $NON-NLS-1$

    gridBagConstraints.gridx = 0;
    gridBagConstraints.gridy = 0;
    panelMailSettings.add(jlAddressFrom, gridBagConstraints);
    gridBagConstraints.gridx = 1;
    gridBagConstraints.gridy = 0;
    panelMailSettings.add(tfMailFrom, gridBagConstraints);

    gridBagConstraints.gridx = 0;
    gridBagConstraints.gridy = 1;
    panelMailSettings.add(jlAddressTo, gridBagConstraints);
    gridBagConstraints.gridx = 1;
    gridBagConstraints.gridy = 1;
    panelMailSettings.add(tfMailTo, gridBagConstraints);

    gridBagConstraints.gridx = 0;
    gridBagConstraints.gridy = 2;
    panelMailSettings.add(jlAddressToCC, gridBagConstraints);
    gridBagConstraints.gridx = 1;
    gridBagConstraints.gridy = 2;
    panelMailSettings.add(tfMailToCC, gridBagConstraints);

    gridBagConstraints.gridx = 0;
    gridBagConstraints.gridy = 3;
    panelMailSettings.add(jlAddressToBCC, gridBagConstraints);
    gridBagConstraints.gridx = 1;
    gridBagConstraints.gridy = 3;
    panelMailSettings.add(tfMailToBCC, gridBagConstraints);

    gridBagConstraints.gridx = 0;
    gridBagConstraints.gridy = 4;
    panelMailSettings.add(jlAddressReplyTo, gridBagConstraints);
    gridBagConstraints.gridx = 1;
    gridBagConstraints.gridy = 4;
    panelMailSettings.add(tfMailReplyTo, gridBagConstraints);

    gridBagConstraintsMain.gridx = 0;
    gridBagConstraintsMain.gridy = 1;
    add(panelMailSettings, gridBagConstraintsMain);

    /*
     * Auth Settings
     */
    JPanel panelAuthSettings = new JPanel(new GridBagLayout());
    panelAuthSettings.setBorder(BorderFactory.createTitledBorder(
            JMeterUtils.getResString("smtp_auth_settings"))); // $NON-NLS-1$

    cbUseAuth.setBorder(BorderFactory.createEmptyBorder(0, 0, 0, 0));
    cbUseAuth.setMargin(new java.awt.Insets(0, 0, 0, 0));
    cbUseAuth.addActionListener(this::cbUseAuthActionPerformed);
    gridBagConstraints.gridx = 0;
    gridBagConstraints.gridy = 0;
    panelAuthSettings.add(cbUseAuth, gridBagConstraints);

    gridBagConstraints.gridx = 1;
    gridBagConstraints.gridy = 0;
    gridBagConstraints.gridwidth = 1;
    gridBagConstraints.weightx = 0;
    panelAuthSettings.add(jlUsername, gridBagConstraints);
    gridBagConstraints.gridx = 2;
    gridBagConstraints.gridy = 0;
    gridBagConstraints.gridwidth = 2;
    gridBagConstraints.weightx = 0.5;
    panelAuthSettings.add(tfAuthUsername, gridBagConstraints);
    // Username/password stay read-only until "use auth" is ticked.
    tfAuthUsername.setEditable(false);
    gridBagConstraints.gridx = 1;
    gridBagConstraints.gridy = 1;
    gridBagConstraints.gridwidth = 1;
    gridBagConstraints.weightx = 0;
    panelAuthSettings.add(jlPassword, gridBagConstraints);
    gridBagConstraints.gridx = 2;
    gridBagConstraints.gridy = 1;
    gridBagConstraints.weightx = 0.5;
    panelAuthSettings.add(tfAuthPassword, gridBagConstraints);
    tfAuthPassword.setEditable(false);

    gridBagConstraintsMain.gridx = 0;
    gridBagConstraintsMain.gridy = 2;
    add(panelAuthSettings, gridBagConstraintsMain);

    /*
     * Security Settings
     */
    securitySettingsPanel = new SecuritySettingsPanel();
    gridBagConstraintsMain.gridx = 0;
    gridBagConstraintsMain.gridy = 3;
    add(securitySettingsPanel, gridBagConstraintsMain);

    /*
     * (non-Javadoc) Message Settings
     */
    JPanel panelMessageSettings = new JPanel(new GridBagLayout());
    panelMessageSettings.setBorder(BorderFactory.createTitledBorder(
            JMeterUtils.getResString("smtp_message_settings"))); // $NON-NLS-1$

    gridBagConstraints.gridx = 0;
    gridBagConstraints.gridy = 0;
    panelMessageSettings.add(jlSubject, gridBagConstraints);
    gridBagConstraints.gridx = 1;
    gridBagConstraints.gridy = 0;
    gridBagConstraints.fill = GridBagConstraints.HORIZONTAL;
    panelMessageSettings.add(tfSubject, gridBagConstraints);

    cbSuppressSubject.setBorder(BorderFactory.createEmptyBorder(0, 0, 0, 0));
    cbSuppressSubject.setMargin(new java.awt.Insets(0, 0, 0, 0));
    gridBagConstraints.gridx = 2;
    gridBagConstraints.gridy = 0;
    gridBagConstraints.fill = GridBagConstraints.NONE;
    panelMessageSettings.add(cbSuppressSubject, gridBagConstraints);

    cbIncludeTimestamp.setBorder(BorderFactory.createEmptyBorder(0, 0, 0, 0));
    cbIncludeTimestamp.setMargin(new java.awt.Insets(0, 0, 0, 0));
    gridBagConstraints.gridx = 1;
    gridBagConstraints.gridy = 1;
    gridBagConstraints.fill = GridBagConstraints.NONE;
    panelMessageSettings.add(cbIncludeTimestamp, gridBagConstraints);

    /*
     * Add the header panel
     */
    addHeaderFieldButton = new JButton(JMeterUtils.getResString("smtp_header_add")); // $NON-NLS-1$
    addHeaderFieldButton.addActionListener(this::addHeaderActionPerformed);
    headerFieldName = new JLabel(JMeterUtils.getResString("smtp_header_name")); // $NON-NLS-1$
    headerFieldValue = new JLabel(JMeterUtils.getResString("smtp_header_value")); // $NON-NLS-1$
    headerFieldsPanel = new JPanel(new GridBagLayout());

    // Column captions become visible only once a header row exists.
    headerFieldName.setVisible(false);
    headerFieldValue.setVisible(false);
    headerGridY=0;
    gridBagConstraints.gridx = 0;
    gridBagConstraints.gridy = headerGridY++;
    headerFieldsPanel.add(addHeaderFieldButton, gridBagConstraints);
    gridBagConstraints.gridx = 0;
    gridBagConstraints.gridy = headerGridY;
    headerFieldsPanel.add(headerFieldName, gridBagConstraints);
    gridBagConstraints.gridx = 1;
    gridBagConstraints.gridy = headerGridY++;
    headerFieldsPanel.add(headerFieldValue, gridBagConstraints);

    // NOTE(review): the *main* constraints object is reused here for a
    // component inside panelMessageSettings — verify this is intentional.
    gridBagConstraintsMain.gridx = 1;
    gridBagConstraintsMain.gridy = 2;
    panelMessageSettings.add(headerFieldsPanel, gridBagConstraintsMain);

    gridBagConstraints.gridx = 0;
    gridBagConstraints.gridy = 3;
    panelMessageSettings.add(jlMessage, gridBagConstraints);
    gridBagConstraints.gridx = 1;
    gridBagConstraints.gridy = 3;
    gridBagConstraints.fill = GridBagConstraints.BOTH;
    panelMessageSettings.add(taMessage, gridBagConstraints);

    cbPlainBody.setBorder(BorderFactory.createEmptyBorder(0, 0, 0, 0));
    cbPlainBody.setMargin(new java.awt.Insets(0, 0, 0, 0));
    gridBagConstraints.gridx = 2;
    gridBagConstraints.gridy = 3;
    gridBagConstraints.fill = GridBagConstraints.NONE;
    panelMessageSettings.add(cbPlainBody, gridBagConstraints);

    gridBagConstraints.gridx = 0;
    gridBagConstraints.gridy = 4;
    gridBagConstraints.fill = GridBagConstraints.NONE;
    panelMessageSettings.add(jlAttachFile, gridBagConstraints);
    gridBagConstraints.gridx = 1;
    gridBagConstraints.gridy = 4;
    gridBagConstraints.fill = GridBagConstraints.HORIZONTAL;
    panelMessageSettings.add(tfAttachment, gridBagConstraints);
    tfAttachment.setToolTipText(JMeterUtils.getResString("smtp_attach_file_tooltip")); // $NON-NLS-1$
    browseButton.addActionListener(this::browseButtonActionPerformed);
    gridBagConstraints.gridx = 2;
    gridBagConstraints.gridy = 4;
    gridBagConstraints.fill = GridBagConstraints.NONE;
    panelMessageSettings.add(browseButton, gridBagConstraints);

    // .eml controls start disabled; toggled by cbUseEmlMessageActionPerformed.
    cbUseEmlMessage.setSelected(false);
    cbUseEmlMessage.addActionListener(this::cbUseEmlMessageActionPerformed);
    gridBagConstraints.gridx = 0;
    gridBagConstraints.gridy = 5;
    gridBagConstraints.fill = GridBagConstraints.NONE;
    panelMessageSettings.add(cbUseEmlMessage, gridBagConstraints);
    gridBagConstraints.gridx = 1;
    gridBagConstraints.gridy = 5;
    gridBagConstraints.fill = GridBagConstraints.HORIZONTAL;
    tfEmlMessage.setEnabled(false);
    panelMessageSettings.add(tfEmlMessage, gridBagConstraints);
    emlBrowseButton.addActionListener(this::emlBrowseButtonActionPerformed);
    emlBrowseButton.setEnabled(false);
    gridBagConstraints.gridx = 2;
    gridBagConstraints.gridy = 5;
    gridBagConstraints.fill = GridBagConstraints.NONE;
    panelMessageSettings.add(emlBrowseButton, gridBagConstraints);

    gridBagConstraintsMain.gridx = 0;
    gridBagConstraintsMain.gridy = 6;
    add(panelMessageSettings, gridBagConstraintsMain);

    /*
     * Additional Settings
     */
    JPanel panelAdditionalSettings = new JPanel(new GridBagLayout());
    panelAdditionalSettings.setBorder(BorderFactory.createTitledBorder(
            JMeterUtils.getResString("smtp_additional_settings"))); // $NON-NLS-1$

    cbMessageSizeStats.setBorder(BorderFactory.createEmptyBorder(0, 0, 0, 0));
    cbMessageSizeStats.setMargin(new java.awt.Insets(0, 0, 0, 0));
    gridBagConstraints.gridx = 0;
    gridBagConstraints.gridy = 0;
    panelAdditionalSettings.add(cbMessageSizeStats, gridBagConstraints);

    gridBagConstraints.gridx = 1;
    gridBagConstraints.gridy = 0;
    panelAdditionalSettings.add(cbEnableDebug, gridBagConstraints);

    gridBagConstraintsMain.gridx = 0;
    gridBagConstraintsMain.gridy = 7;
    add(panelAdditionalSettings, gridBagConstraintsMain);
}
/**
 * Toggles the editability of the username and password fields to match the
 * "use auth" checkbox.
 *
 * @param evt
 *            ActionEvent to be handled
 */
private void cbUseAuthActionPerformed(ActionEvent evt) { // NOSONAR This method is used through lambda
    final boolean authEnabled = cbUseAuth.isSelected();
    tfAuthUsername.setEditable(authEnabled);
    tfAuthPassword.setEditable(authEnabled);
}
/**
 * ActionPerformed-method for filechooser "attachmentFileChooser": appends the
 * chosen file's absolute path to the attachment field, separated by
 * {@code SmtpSampler.FILENAME_SEPARATOR} when attachments are already present.
 *
 * @param evt
 *            ActionEvent to be handled
 */
private void attachmentFolderFileChooserActionPerformed(ActionEvent evt) { // NOSONAR This method is used through lambda
    File chosen = attachmentFileChooser.getSelectedFile();
    if (chosen == null) {
        // Dialog was cancelled or nothing was selected: leave field untouched
        return;
    }
    final String attachments = tfAttachment.getText().trim();
    if (attachments.isEmpty()) {
        tfAttachment.setText(chosen.getAbsolutePath());
    } else {
        tfAttachment.setText(attachments
                + SmtpSampler.FILENAME_SEPARATOR
                + chosen.getAbsolutePath());
    }
}
/**
 * ActionPerformed-method for button "browseButton": pops up the modal
 * attachment file chooser over this panel.
 *
 * @param evt
 *            ActionEvent to be handled
 */
private void browseButtonActionPerformed(ActionEvent evt) { // NOSONAR This method is used through lambda
    // Selection is handled by the chooser's own ActionListener, so the
    // dialog's return value is intentionally ignored here.
    attachmentFileChooser.showOpenDialog(this);
}
/**
 * ActionPerformed-method for checkbox "useEmlMessage": when an EML file
 * supplies the message, the inline message/attachment widgets are disabled,
 * and vice versa.
 *
 * @param evt
 *            ActionEvent to be handled
 */
private void cbUseEmlMessageActionPerformed(ActionEvent evt) { // NOSONAR This method is used through lambda
    final boolean useEml = cbUseEmlMessage.isSelected();
    tfEmlMessage.setEnabled(useEml);
    emlBrowseButton.setEnabled(useEml);
    taMessage.setEnabled(!useEml);
    tfAttachment.setEnabled(!useEml);
    browseButton.setEnabled(!useEml);
}
/**
 * ActionPerformed-method for filechooser "emlFileChooser": stores the chosen
 * EML file's absolute path in the EML message field.
 *
 * @param evt
 *            ActionEvent to be handled
 */
private void emlFileChooserActionPerformed(ActionEvent evt) { // NOSONAR This method is used through lambda
    File chosen = emlFileChooser.getSelectedFile();
    if (chosen == null) {
        // getSelectedFile() returns null when the dialog is cancelled; keep
        // the previous value (mirrors attachmentFolderFileChooserActionPerformed)
        return;
    }
    tfEmlMessage.setText(chosen.getAbsolutePath());
}
/**
 * ActionPerformed-method for button "emlButton": pops up the modal EML file
 * chooser over this panel.
 *
 * @param evt
 *            ActionEvent to be handled
 */
private void emlBrowseButtonActionPerformed(ActionEvent evt) { // NOSONAR This method is used through lambda
    // The chooser's ActionListener handles the selection, so the dialog's
    // return value is intentionally ignored.
    emlFileChooser.showOpenDialog(this);
}
/**
 * Resets every GUI field of this panel to its default (empty/unchecked)
 * state, including the security settings sub-panel and any dynamically added
 * header rows.
 */
public void clear() {
    // Checkboxes back to unchecked
    cbEnableDebug.setSelected(false);
    cbIncludeTimestamp.setSelected(false);
    cbMessageSizeStats.setSelected(false);
    cbPlainBody.setSelected(false);
    cbSuppressSubject.setSelected(false);
    cbUseAuth.setSelected(false);
    cbUseEmlMessage.setSelected(false);
    // Text inputs and the message body back to empty
    taMessage.setText("");
    tfAttachment.setText("");
    tfAuthPassword.setText("");
    tfAuthUsername.setText("");
    tfEmlMessage.setText("");
    tfMailFrom.setText("");
    tfMailReplyTo.setText("");
    tfMailServer.setText("");
    tfMailServerConnectionTimeout.setText("");
    tfMailServerPort.setText("");
    tfMailServerTimeout.setText("");
    tfMailTo.setText("");
    tfMailToBCC.setText("");
    tfMailToCC.setText("");
    tfSubject.setText("");
    // Sub-panels and dynamic rows
    securitySettingsPanel.clear();
    clearHeaderFields();
    validate();
}
/**
 * Removes every dynamically added header row (name field, value field and
 * remove button) from the header panel and empties both bookkeeping maps.
 */
private void clearHeaderFields() {
    // Hide the column captions; they are shown again when a row is added.
    headerFieldName.setVisible(false);
    headerFieldValue.setVisible(false);
    // Snapshot the buttons so the maps can be modified while walking them.
    for (JButton removeButton : new java.util.ArrayList<>(removeButtons.keySet())) {
        JTextField headerName = removeButtons.get(removeButton);
        JTextField headerValue = headerFields.get(headerName);
        headerFieldsPanel.remove(headerName);
        if (headerValue != null) { // Can be null (not sure why)
            headerFieldsPanel.remove(headerValue);
        }
        headerFieldsPanel.remove(removeButton);
        headerFields.remove(headerName);
        removeButtons.remove(removeButton);
    }
}
/**
 * Adds a new header row (name field, value field and a remove button) to the
 * header panel, registers it in the bookkeeping maps and returns the remove
 * button so callers can wire or test it.
 *
 * @param evt
 *            ActionEvent to be handled
 * @return the remove button created for the new row
 */
private JButton addHeaderActionPerformed(ActionEvent evt){
    // First row being added: reveal the column captions.
    if (headerFields.isEmpty()) {
        headerFieldName.setVisible(true);
        headerFieldValue.setVisible(true);
    }
    final JTextField nameTF = new JTextField();
    final JTextField valueTF = new JTextField();
    final JButton removeButton = new JButton(JMeterUtils.getResString("smtp_header_remove")); // $NON-NLS-1$
    headerFields.put(nameTF, valueTF);
    removeButtons.put(removeButton, nameTF);
    removeButton.addActionListener(this::removeHeaderActionPerformed);
    GridBagConstraints gbc = new GridBagConstraints();
    gbc.insets = new java.awt.Insets(2, 2, 2, 2);
    gbc.weightx = 0.5;
    gbc.anchor = GridBagConstraints.WEST;
    gbc.gridy = headerGridY;
    gbc.gridx = 0;
    gbc.fill = GridBagConstraints.HORIZONTAL;
    headerFieldsPanel.add(nameTF, gbc);
    gbc.gridx = 1;
    headerFieldsPanel.add(valueTF, gbc);
    gbc.gridx = 2;
    gbc.fill = GridBagConstraints.NONE;
    headerFieldsPanel.add(removeButton, gbc);
    headerGridY++;
    validate();
    return removeButton;
}
/**
 * @return the panel holding the connection security (SSL/TLS) settings
 */
public SecuritySettingsPanel getSecuritySettingsPanel() {
    return securitySettingsPanel;
}
/**
 * Replaces the panel holding the connection security (SSL/TLS) settings.
 *
 * @param securitySettingsPanel
 *            the new security settings panel; also cleared via {@code clear()}
 */
public void setSecuritySettingsPanel(SecuritySettingsPanel securitySettingsPanel) {
    this.securitySettingsPanel = securitySettingsPanel;
}
/**
 * ActionPerformed-method for the per-row "remove header" buttons: removes the
 * header row (name field, value field and the button itself) that belongs to
 * the button which fired the event, and drops its bookkeeping entries.
 *
 * @param evt
 *            ActionEvent to be handled
 */
private void removeHeaderActionPerformed(ActionEvent evt){ // NOSONAR This method is used through lambda
    final Object source = evt.getSource();
    if(source instanceof JButton){
        if(headerFields.size() == 1){
            // Last row is going away: hide the column captions again
            headerFieldName.setVisible(false);
            headerFieldValue.setVisible(false);
        }
        JTextField nameTF = removeButtons.get(source);
        JTextField valueTF = headerFields.get(nameTF);
        headerFields.remove(nameTF);
        // Also drop the button mapping; leaving it behind is what produced
        // the null value entries observed in clearHeaderFields()
        removeButtons.remove(source);
        headerFieldsPanel.remove(nameTF);
        if (valueTF != null) {
            // Container.remove(null) throws NPE, so guard like clearHeaderFields()
            headerFieldsPanel.remove(valueTF);
        }
        headerFieldsPanel.remove((JButton)source);
        validate();
    }
}
/**
 * StateChanged-handler for the "suppress subject" checkbox: disables the
 * subject field and the timestamp checkbox while the subject is suppressed,
 * and re-enables them otherwise.
 *
 * @param evt
 *            ChangeEvent to be handled
 */
private void emptySubjectActionPerformed(ChangeEvent evt) { // NOSONAR This method is used through lambda
    final Object source = evt.getSource();
    if (source instanceof JCheckBox) {
        final boolean subjectEnabled = !cbSuppressSubject.isSelected();
        tfSubject.setEnabled(subjectEnabled);
        cbIncludeTimestamp.setEnabled(subjectEnabled);
    }
}
}
|
googleapis/google-cloud-java | 37,737 | java-private-catalog/proto-google-cloud-private-catalog-v1beta1/src/main/java/com/google/cloud/privatecatalog/v1beta1/SearchProductsResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/privatecatalog/v1beta1/private_catalog.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.privatecatalog.v1beta1;
/**
*
*
* <pre>
* Response message for [PrivateCatalog.SearchProducts][google.cloud.privatecatalog.v1beta1.PrivateCatalog.SearchProducts].
* </pre>
*
* Protobuf type {@code google.cloud.privatecatalog.v1beta1.SearchProductsResponse}
*/
public final class SearchProductsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.privatecatalog.v1beta1.SearchProductsResponse)
SearchProductsResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use SearchProductsResponse.newBuilder() to construct.
private SearchProductsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private SearchProductsResponse() {
products_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new SearchProductsResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.privatecatalog.v1beta1.PrivateCatalogProto
.internal_static_google_cloud_privatecatalog_v1beta1_SearchProductsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.privatecatalog.v1beta1.PrivateCatalogProto
.internal_static_google_cloud_privatecatalog_v1beta1_SearchProductsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.privatecatalog.v1beta1.SearchProductsResponse.class,
com.google.cloud.privatecatalog.v1beta1.SearchProductsResponse.Builder.class);
}
public static final int PRODUCTS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.privatecatalog.v1beta1.Product> products_;
/**
*
*
* <pre>
* The `Product` resources computed from the resource context.
* </pre>
*
* <code>repeated .google.cloud.privatecatalog.v1beta1.Product products = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.privatecatalog.v1beta1.Product> getProductsList() {
return products_;
}
/**
*
*
* <pre>
* The `Product` resources computed from the resource context.
* </pre>
*
* <code>repeated .google.cloud.privatecatalog.v1beta1.Product products = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.privatecatalog.v1beta1.ProductOrBuilder>
getProductsOrBuilderList() {
return products_;
}
/**
*
*
* <pre>
* The `Product` resources computed from the resource context.
* </pre>
*
* <code>repeated .google.cloud.privatecatalog.v1beta1.Product products = 1;</code>
*/
@java.lang.Override
public int getProductsCount() {
return products_.size();
}
/**
*
*
* <pre>
* The `Product` resources computed from the resource context.
* </pre>
*
* <code>repeated .google.cloud.privatecatalog.v1beta1.Product products = 1;</code>
*/
@java.lang.Override
public com.google.cloud.privatecatalog.v1beta1.Product getProducts(int index) {
return products_.get(index);
}
/**
*
*
* <pre>
* The `Product` resources computed from the resource context.
* </pre>
*
* <code>repeated .google.cloud.privatecatalog.v1beta1.Product products = 1;</code>
*/
@java.lang.Override
public com.google.cloud.privatecatalog.v1beta1.ProductOrBuilder getProductsOrBuilder(int index) {
return products_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A pagination token returned from a previous call to SearchProducts that
* indicates from where listing should continue.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
@java.lang.Override
public java.lang.String getNextPageToken() {
  java.lang.Object ref = nextPageToken_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    // Field is stored as ByteString until first read; decode once and cache
    // the String back into the field (standard protobuf lazy-decode idiom).
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    nextPageToken_ = s;
    return s;
  }
}
/**
*
*
* <pre>
* A pagination token returned from a previous call to SearchProducts that
* indicates from where listing should continue.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// Memoized initialization state: -1 = not computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  // No required fields in this proto3 message, so it is always initialized.
  memoizedIsInitialized = 1;
  return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  // Serializes fields in field-number order; an empty next_page_token is
  // omitted entirely, per proto3 default-value semantics.
  for (int i = 0; i < products_.size(); i++) {
    output.writeMessage(1, products_.get(i));
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
  }
  // Unknown fields captured during parsing are round-tripped unchanged.
  getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
  // Size is memoized; -1 marks "not yet computed" (message is immutable).
  int size = memoizedSize;
  if (size != -1) return size;
  size = 0;
  for (int i = 0; i < products_.size(); i++) {
    size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, products_.get(i));
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.cloud.privatecatalog.v1beta1.SearchProductsResponse)) {
    return super.equals(obj);
  }
  com.google.cloud.privatecatalog.v1beta1.SearchProductsResponse other =
      (com.google.cloud.privatecatalog.v1beta1.SearchProductsResponse) obj;
  // Field-by-field comparison, including unknown fields, so that two
  // messages are equal iff their serialized forms are equivalent.
  if (!getProductsList().equals(other.getProductsList())) return false;
  if (!getNextPageToken().equals(other.getNextPageToken())) return false;
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}
@java.lang.Override
public int hashCode() {
  // Memoized; 0 doubles as the "not yet computed" sentinel (message is
  // immutable, so computing at most twice is harmless).
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  // Empty repeated field is skipped so it hashes the same as "unset",
  // keeping hashCode consistent with equals().
  if (getProductsCount() > 0) {
    hash = (37 * hash) + PRODUCTS_FIELD_NUMBER;
    hash = (53 * hash) + getProductsList().hashCode();
  }
  hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
  hash = (53 * hash) + getNextPageToken().hashCode();
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
public static com.google.cloud.privatecatalog.v1beta1.SearchProductsResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.privatecatalog.v1beta1.SearchProductsResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.privatecatalog.v1beta1.SearchProductsResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.privatecatalog.v1beta1.SearchProductsResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.privatecatalog.v1beta1.SearchProductsResponse parseFrom(
byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.privatecatalog.v1beta1.SearchProductsResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.privatecatalog.v1beta1.SearchProductsResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.privatecatalog.v1beta1.SearchProductsResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.privatecatalog.v1beta1.SearchProductsResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.privatecatalog.v1beta1.SearchProductsResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.privatecatalog.v1beta1.SearchProductsResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.privatecatalog.v1beta1.SearchProductsResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.privatecatalog.v1beta1.SearchProductsResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Response message for [PrivateCatalog.SearchProducts][google.cloud.privatecatalog.v1beta1.PrivateCatalog.SearchProducts].
* </pre>
*
* Protobuf type {@code google.cloud.privatecatalog.v1beta1.SearchProductsResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.privatecatalog.v1beta1.SearchProductsResponse)
com.google.cloud.privatecatalog.v1beta1.SearchProductsResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.privatecatalog.v1beta1.PrivateCatalogProto
.internal_static_google_cloud_privatecatalog_v1beta1_SearchProductsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.privatecatalog.v1beta1.PrivateCatalogProto
.internal_static_google_cloud_privatecatalog_v1beta1_SearchProductsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.privatecatalog.v1beta1.SearchProductsResponse.class,
com.google.cloud.privatecatalog.v1beta1.SearchProductsResponse.Builder.class);
}
// Construct using com.google.cloud.privatecatalog.v1beta1.SearchProductsResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (productsBuilder_ == null) {
products_ = java.util.Collections.emptyList();
} else {
products_ = null;
productsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.privatecatalog.v1beta1.PrivateCatalogProto
.internal_static_google_cloud_privatecatalog_v1beta1_SearchProductsResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.privatecatalog.v1beta1.SearchProductsResponse
getDefaultInstanceForType() {
return com.google.cloud.privatecatalog.v1beta1.SearchProductsResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.privatecatalog.v1beta1.SearchProductsResponse build() {
com.google.cloud.privatecatalog.v1beta1.SearchProductsResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.privatecatalog.v1beta1.SearchProductsResponse buildPartial() {
com.google.cloud.privatecatalog.v1beta1.SearchProductsResponse result =
new com.google.cloud.privatecatalog.v1beta1.SearchProductsResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.cloud.privatecatalog.v1beta1.SearchProductsResponse result) {
if (productsBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
products_ = java.util.Collections.unmodifiableList(products_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.products_ = products_;
} else {
result.products_ = productsBuilder_.build();
}
}
private void buildPartial0(
com.google.cloud.privatecatalog.v1beta1.SearchProductsResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.privatecatalog.v1beta1.SearchProductsResponse) {
return mergeFrom((com.google.cloud.privatecatalog.v1beta1.SearchProductsResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
// Merges all set fields of {@code other} into this builder: repeated
// products are appended, and a non-empty next_page_token overwrites ours.
public Builder mergeFrom(com.google.cloud.privatecatalog.v1beta1.SearchProductsResponse other) {
  if (other
      == com.google.cloud.privatecatalog.v1beta1.SearchProductsResponse.getDefaultInstance())
    return this;
  if (productsBuilder_ == null) {
    // List-backed mode: share other's immutable list when ours is empty,
    // otherwise copy-on-write and append.
    if (!other.products_.isEmpty()) {
      if (products_.isEmpty()) {
        products_ = other.products_;
        bitField0_ = (bitField0_ & ~0x00000001);
      } else {
        ensureProductsIsMutable();
        products_.addAll(other.products_);
      }
      onChanged();
    }
  } else {
    // Field-builder mode: if our builder is empty, discard it and adopt
    // other's list directly (rebuilding the field builder only if the
    // runtime forces builders); otherwise append message-wise.
    if (!other.products_.isEmpty()) {
      if (productsBuilder_.isEmpty()) {
        productsBuilder_.dispose();
        productsBuilder_ = null;
        products_ = other.products_;
        bitField0_ = (bitField0_ & ~0x00000001);
        productsBuilder_ =
            com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                ? getProductsFieldBuilder()
                : null;
      } else {
        productsBuilder_.addAllMessages(other.products_);
      }
    }
  }
  if (!other.getNextPageToken().isEmpty()) {
    nextPageToken_ = other.nextPageToken_;
    bitField0_ |= 0x00000002;
    onChanged();
  }
  this.mergeUnknownFields(other.getUnknownFields());
  onChanged();
  return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
// Wire-format parser: reads tag/value pairs until EOF (tag 0) or an
// end-group tag, merging recognized fields and preserving unknown ones.
public Builder mergeFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0:
          done = true;
          break;
        case 10: // field 1 (products), wire type 2: length-delimited message
          {
            com.google.cloud.privatecatalog.v1beta1.Product m =
                input.readMessage(
                    com.google.cloud.privatecatalog.v1beta1.Product.parser(),
                    extensionRegistry);
            if (productsBuilder_ == null) {
              ensureProductsIsMutable();
              products_.add(m);
            } else {
              productsBuilder_.addMessage(m);
            }
            break;
          } // case 10
        case 18: // field 2 (next_page_token), wire type 2: UTF-8 string
          {
            nextPageToken_ = input.readStringRequireUtf8();
            bitField0_ |= 0x00000002;
            break;
          } // case 18
        default:
          {
            if (!super.parseUnknownField(input, extensionRegistry, tag)) {
              done = true; // was an endgroup tag
            }
            break;
          } // default:
      } // switch (tag)
    } // while (!done)
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.unwrapIOException();
  } finally {
    // Notify parent builders even on failure, since fields may have been set.
    onChanged();
  } // finally
  return this;
}
private int bitField0_;
private java.util.List<com.google.cloud.privatecatalog.v1beta1.Product> products_ =
java.util.Collections.emptyList();
// Copy-on-write guard: bit 0x1 of bitField0_ records whether products_ is
// already a private mutable ArrayList; if not, copy it before mutation.
private void ensureProductsIsMutable() {
  if (!((bitField0_ & 0x00000001) != 0)) {
    products_ =
        new java.util.ArrayList<com.google.cloud.privatecatalog.v1beta1.Product>(products_);
    bitField0_ |= 0x00000001;
  }
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.privatecatalog.v1beta1.Product,
com.google.cloud.privatecatalog.v1beta1.Product.Builder,
com.google.cloud.privatecatalog.v1beta1.ProductOrBuilder>
productsBuilder_;
/**
*
*
* <pre>
* The `Product` resources computed from the resource context.
* </pre>
*
* <code>repeated .google.cloud.privatecatalog.v1beta1.Product products = 1;</code>
*/
public java.util.List<com.google.cloud.privatecatalog.v1beta1.Product> getProductsList() {
if (productsBuilder_ == null) {
return java.util.Collections.unmodifiableList(products_);
} else {
return productsBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* The `Product` resources computed from the resource context.
* </pre>
*
* <code>repeated .google.cloud.privatecatalog.v1beta1.Product products = 1;</code>
*/
public int getProductsCount() {
if (productsBuilder_ == null) {
return products_.size();
} else {
return productsBuilder_.getCount();
}
}
/**
*
*
* <pre>
* The `Product` resources computed from the resource context.
* </pre>
*
* <code>repeated .google.cloud.privatecatalog.v1beta1.Product products = 1;</code>
*/
public com.google.cloud.privatecatalog.v1beta1.Product getProducts(int index) {
if (productsBuilder_ == null) {
return products_.get(index);
} else {
return productsBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* The `Product` resources computed from the resource context.
* </pre>
*
* <code>repeated .google.cloud.privatecatalog.v1beta1.Product products = 1;</code>
*/
public Builder setProducts(int index, com.google.cloud.privatecatalog.v1beta1.Product value) {
if (productsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureProductsIsMutable();
products_.set(index, value);
onChanged();
} else {
productsBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The `Product` resources computed from the resource context.
* </pre>
*
* <code>repeated .google.cloud.privatecatalog.v1beta1.Product products = 1;</code>
*/
public Builder setProducts(
int index, com.google.cloud.privatecatalog.v1beta1.Product.Builder builderForValue) {
if (productsBuilder_ == null) {
ensureProductsIsMutable();
products_.set(index, builderForValue.build());
onChanged();
} else {
productsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The `Product` resources computed from the resource context.
* </pre>
*
* <code>repeated .google.cloud.privatecatalog.v1beta1.Product products = 1;</code>
*/
public Builder addProducts(com.google.cloud.privatecatalog.v1beta1.Product value) {
if (productsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureProductsIsMutable();
products_.add(value);
onChanged();
} else {
productsBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* The `Product` resources computed from the resource context.
* </pre>
*
* <code>repeated .google.cloud.privatecatalog.v1beta1.Product products = 1;</code>
*/
public Builder addProducts(int index, com.google.cloud.privatecatalog.v1beta1.Product value) {
if (productsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureProductsIsMutable();
products_.add(index, value);
onChanged();
} else {
productsBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The `Product` resources computed from the resource context.
* </pre>
*
* <code>repeated .google.cloud.privatecatalog.v1beta1.Product products = 1;</code>
*/
public Builder addProducts(
com.google.cloud.privatecatalog.v1beta1.Product.Builder builderForValue) {
if (productsBuilder_ == null) {
ensureProductsIsMutable();
products_.add(builderForValue.build());
onChanged();
} else {
productsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The `Product` resources computed from the resource context.
* </pre>
*
* <code>repeated .google.cloud.privatecatalog.v1beta1.Product products = 1;</code>
*/
public Builder addProducts(
int index, com.google.cloud.privatecatalog.v1beta1.Product.Builder builderForValue) {
if (productsBuilder_ == null) {
ensureProductsIsMutable();
products_.add(index, builderForValue.build());
onChanged();
} else {
productsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The `Product` resources computed from the resource context.
* </pre>
*
* <code>repeated .google.cloud.privatecatalog.v1beta1.Product products = 1;</code>
*/
public Builder addAllProducts(
java.lang.Iterable<? extends com.google.cloud.privatecatalog.v1beta1.Product> values) {
if (productsBuilder_ == null) {
ensureProductsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, products_);
onChanged();
} else {
productsBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* The `Product` resources computed from the resource context.
* </pre>
*
* <code>repeated .google.cloud.privatecatalog.v1beta1.Product products = 1;</code>
*/
public Builder clearProducts() {
if (productsBuilder_ == null) {
products_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
productsBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* The `Product` resources computed from the resource context.
* </pre>
*
* <code>repeated .google.cloud.privatecatalog.v1beta1.Product products = 1;</code>
*/
    // Removes products[index]. While no nested builder is active the backing
    // list is mutated directly (after ensuring this Builder owns a mutable
    // copy); once a RepeatedFieldBuilderV3 exists, all mutation is delegated
    // to it so sub-builders stay consistent.
    public Builder removeProducts(int index) {
      if (productsBuilder_ == null) {
        ensureProductsIsMutable();
        products_.remove(index);
        onChanged();
      } else {
        productsBuilder_.remove(index);
      }
      return this;
    }

    /**
     * Returns a mutable sub-builder for products[index]. Note this forces
     * the switch from plain-list storage to builder storage.
     *
     * <code>repeated .google.cloud.privatecatalog.v1beta1.Product products = 1;</code>
     */
    public com.google.cloud.privatecatalog.v1beta1.Product.Builder getProductsBuilder(int index) {
      return getProductsFieldBuilder().getBuilder(index);
    }

    /**
     * Read-only view of products[index]; does NOT force builder storage.
     *
     * <code>repeated .google.cloud.privatecatalog.v1beta1.Product products = 1;</code>
     */
    public com.google.cloud.privatecatalog.v1beta1.ProductOrBuilder getProductsOrBuilder(
        int index) {
      if (productsBuilder_ == null) {
        return products_.get(index);
      } else {
        return productsBuilder_.getMessageOrBuilder(index);
      }
    }

    /**
     * Read-only view of the whole repeated field; unmodifiable wrapper over
     * the list when no nested builder exists.
     *
     * <code>repeated .google.cloud.privatecatalog.v1beta1.Product products = 1;</code>
     */
    public java.util.List<? extends com.google.cloud.privatecatalog.v1beta1.ProductOrBuilder>
        getProductsOrBuilderList() {
      if (productsBuilder_ != null) {
        return productsBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(products_);
      }
    }

    /**
     * Appends a new default-initialized element and returns its sub-builder.
     *
     * <code>repeated .google.cloud.privatecatalog.v1beta1.Product products = 1;</code>
     */
    public com.google.cloud.privatecatalog.v1beta1.Product.Builder addProductsBuilder() {
      return getProductsFieldBuilder()
          .addBuilder(com.google.cloud.privatecatalog.v1beta1.Product.getDefaultInstance());
    }

    /**
     * Inserts a new default-initialized element at {@code index} and returns
     * its sub-builder.
     *
     * <code>repeated .google.cloud.privatecatalog.v1beta1.Product products = 1;</code>
     */
    public com.google.cloud.privatecatalog.v1beta1.Product.Builder addProductsBuilder(int index) {
      return getProductsFieldBuilder()
          .addBuilder(index, com.google.cloud.privatecatalog.v1beta1.Product.getDefaultInstance());
    }

    /**
     * Returns sub-builders for every element; forces builder storage.
     *
     * <code>repeated .google.cloud.privatecatalog.v1beta1.Product products = 1;</code>
     */
    public java.util.List<com.google.cloud.privatecatalog.v1beta1.Product.Builder>
        getProductsBuilderList() {
      return getProductsFieldBuilder().getBuilderList();
    }

    // Lazily creates the RepeatedFieldBuilderV3 for 'products'. On first use
    // ownership of the element list transfers to the field builder (the
    // bitField0_ bit records whether this Builder held a mutable copy), and
    // products_ is nulled so all later access goes through the builder.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.privatecatalog.v1beta1.Product,
            com.google.cloud.privatecatalog.v1beta1.Product.Builder,
            com.google.cloud.privatecatalog.v1beta1.ProductOrBuilder>
        getProductsFieldBuilder() {
      if (productsBuilder_ == null) {
        productsBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.cloud.privatecatalog.v1beta1.Product,
                com.google.cloud.privatecatalog.v1beta1.Product.Builder,
                com.google.cloud.privatecatalog.v1beta1.ProductOrBuilder>(
                products_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
        products_ = null;
      }
      return productsBuilder_;
    }
    // Stores either a String or a ByteString; lazily converted in each
    // direction and cached (standard protobuf-java string-field idiom).
    private java.lang.Object nextPageToken_ = "";

    /**
     * A pagination token returned from a previous call to SearchProducts that
     * indicates from where listing should continue.
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The nextPageToken.
     */
    public java.lang.String getNextPageToken() {
      java.lang.Object ref = nextPageToken_;
      if (!(ref instanceof java.lang.String)) {
        // Decode the cached ByteString once and keep the String form.
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        nextPageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     * Same token as {@link #getNextPageToken()} in UTF-8 bytes; the encoded
     * form is cached after the first conversion.
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The bytes for nextPageToken.
     */
    public com.google.protobuf.ByteString getNextPageTokenBytes() {
      java.lang.Object ref = nextPageToken_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        nextPageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     * Sets the pagination token.
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The nextPageToken to set; must not be null.
     * @return This builder for chaining.
     */
    public Builder setNextPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      nextPageToken_ = value;
      bitField0_ |= 0x00000002; // mark field 2 as explicitly set
      onChanged();
      return this;
    }

    /**
     * Resets the pagination token to its default (empty string) and clears
     * the has-bit.
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearNextPageToken() {
      nextPageToken_ = getDefaultInstance().getNextPageToken();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }

    /**
     * Sets the pagination token from raw bytes; rejects invalid UTF-8.
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The bytes for nextPageToken to set; must not be null.
     * @return This builder for chaining.
     */
    public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    // Unknown-field handling is inherited unchanged from GeneratedMessageV3;
    // these overrides only exist so the methods are final on this Builder.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.privatecatalog.v1beta1.SearchProductsResponse)
  }

  // @@protoc_insertion_point(class_scope:google.cloud.privatecatalog.v1beta1.SearchProductsResponse)
  // Shared immutable default instance; created eagerly at class load.
  private static final com.google.cloud.privatecatalog.v1beta1.SearchProductsResponse
      DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.privatecatalog.v1beta1.SearchProductsResponse();
  }

  public static com.google.cloud.privatecatalog.v1beta1.SearchProductsResponse
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Stateless parser: delegates to Builder.mergeFrom and re-wraps every
  // failure mode as InvalidProtocolBufferException, attaching the partially
  // parsed message so callers can inspect what was read before the error.
  private static final com.google.protobuf.Parser<SearchProductsResponse> PARSER =
      new com.google.protobuf.AbstractParser<SearchProductsResponse>() {
        @java.lang.Override
        public SearchProductsResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<SearchProductsResponse> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<SearchProductsResponse> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.privatecatalog.v1beta1.SearchProductsResponse
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
openjdk/jdk8 | 37,906 | jdk/src/solaris/classes/sun/print/UnixPrintService.java | /*
* Copyright (c) 2000, 2007, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package sun.print;
import java.io.File;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.Locale;
import javax.print.DocFlavor;
import javax.print.DocPrintJob;
import javax.print.PrintService;
import javax.print.ServiceUIFactory;
import javax.print.attribute.Attribute;
import javax.print.attribute.AttributeSet;
import javax.print.attribute.AttributeSetUtilities;
import javax.print.attribute.HashAttributeSet;
import javax.print.attribute.PrintServiceAttribute;
import javax.print.attribute.PrintServiceAttributeSet;
import javax.print.attribute.HashPrintServiceAttributeSet;
import javax.print.attribute.Size2DSyntax;
import javax.print.attribute.standard.PrinterName;
import javax.print.attribute.standard.PrinterIsAcceptingJobs;
import javax.print.attribute.standard.QueuedJobCount;
import javax.print.attribute.standard.JobName;
import javax.print.attribute.standard.JobSheets;
import javax.print.attribute.standard.RequestingUserName;
import javax.print.attribute.standard.Chromaticity;
import javax.print.attribute.standard.ColorSupported;
import javax.print.attribute.standard.Copies;
import javax.print.attribute.standard.CopiesSupported;
import javax.print.attribute.standard.Destination;
import javax.print.attribute.standard.Fidelity;
import javax.print.attribute.standard.Media;
import javax.print.attribute.standard.MediaPrintableArea;
import javax.print.attribute.standard.MediaSize;
import javax.print.attribute.standard.MediaSizeName;
import javax.print.attribute.standard.OrientationRequested;
import javax.print.attribute.standard.PageRanges;
import javax.print.attribute.standard.PrinterState;
import javax.print.attribute.standard.PrinterStateReason;
import javax.print.attribute.standard.PrinterStateReasons;
import javax.print.attribute.standard.Severity;
import javax.print.attribute.standard.SheetCollate;
import javax.print.attribute.standard.Sides;
import javax.print.event.PrintServiceAttributeListener;
public class UnixPrintService implements PrintService, AttributeUpdater,
SunPrinterJobService {
    /* define doc flavors for text types in the default encoding of
     * this platform since we can always read those.
     */
    // Default is a placeholder; the static block below overwrites it with
    // the JVM's file.encoding at class-load time.
    private static String encoding = "ISO8859_1";
    private static DocFlavor textByteFlavor;
    // Lazily built by initSupportedDocFlavors(): supportedDocFlavorsInit
    // plus host-charset text flavors when the host charset is not one of
    // the always-supported encodings.
    private static DocFlavor[] supportedDocFlavors = null;
    private static final DocFlavor[] supportedDocFlavorsInit = {
        DocFlavor.BYTE_ARRAY.POSTSCRIPT,
        DocFlavor.INPUT_STREAM.POSTSCRIPT,
        DocFlavor.URL.POSTSCRIPT,
        DocFlavor.BYTE_ARRAY.GIF,
        DocFlavor.INPUT_STREAM.GIF,
        DocFlavor.URL.GIF,
        DocFlavor.BYTE_ARRAY.JPEG,
        DocFlavor.INPUT_STREAM.JPEG,
        DocFlavor.URL.JPEG,
        DocFlavor.BYTE_ARRAY.PNG,
        DocFlavor.INPUT_STREAM.PNG,
        DocFlavor.URL.PNG,
        DocFlavor.CHAR_ARRAY.TEXT_PLAIN,
        DocFlavor.READER.TEXT_PLAIN,
        DocFlavor.STRING.TEXT_PLAIN,
        DocFlavor.BYTE_ARRAY.TEXT_PLAIN_UTF_8,
        DocFlavor.BYTE_ARRAY.TEXT_PLAIN_UTF_16,
        DocFlavor.BYTE_ARRAY.TEXT_PLAIN_UTF_16BE,
        DocFlavor.BYTE_ARRAY.TEXT_PLAIN_UTF_16LE,
        DocFlavor.BYTE_ARRAY.TEXT_PLAIN_US_ASCII,
        DocFlavor.INPUT_STREAM.TEXT_PLAIN_UTF_8,
        DocFlavor.INPUT_STREAM.TEXT_PLAIN_UTF_16,
        DocFlavor.INPUT_STREAM.TEXT_PLAIN_UTF_16BE,
        DocFlavor.INPUT_STREAM.TEXT_PLAIN_UTF_16LE,
        DocFlavor.INPUT_STREAM.TEXT_PLAIN_US_ASCII,
        DocFlavor.URL.TEXT_PLAIN_UTF_8,
        DocFlavor.URL.TEXT_PLAIN_UTF_16,
        DocFlavor.URL.TEXT_PLAIN_UTF_16BE,
        DocFlavor.URL.TEXT_PLAIN_UTF_16LE,
        DocFlavor.URL.TEXT_PLAIN_US_ASCII,
        DocFlavor.SERVICE_FORMATTED.PAGEABLE,
        DocFlavor.SERVICE_FORMATTED.PRINTABLE,
        DocFlavor.BYTE_ARRAY.AUTOSENSE,
        DocFlavor.URL.AUTOSENSE,
        DocFlavor.INPUT_STREAM.AUTOSENSE
    };
    // Text flavors in the platform's host charset; appended only when the
    // host charset is not already covered above.
    private static final DocFlavor[] supportedHostDocFlavors = {
        DocFlavor.BYTE_ARRAY.TEXT_PLAIN_HOST,
        DocFlavor.INPUT_STREAM.TEXT_PLAIN_HOST,
        DocFlavor.URL.TEXT_PLAIN_HOST
    };
    // Shell pipelines appended to "lpc status"; indexed by
    // UnixPrintServiceLookup.cmdIndex (empty for classic lpc, a grep/awk
    // filter for LPRng-style "lpc -a" output).
    String[] lpcStatusCom = {
      "",
      "| grep -E '^[ 0-9a-zA-Z_-]*@' | awk '{print $2, $3}'"
    };
    String[] lpcQueueCom = {
      "",
      "| grep -E '^[ 0-9a-zA-Z_-]*@' | awk '{print $4}'"
    };
    static {
        /* initialize to capture the platform's default text encoding */
        encoding = java.security.AccessController.doPrivileged(
            new sun.security.action.GetPropertyAction("file.encoding"));
    }
    /* let's try to support a few of these */
    private static final Class[] serviceAttrCats = {
        PrinterName.class,
        PrinterIsAcceptingJobs.class,
        QueuedJobCount.class,
    };
    /* it turns out to be inconvenient to store the other categories
     * separately because many attributes are in multiple categories.
     */
    private static final Class[] otherAttrCats = {
        Chromaticity.class,
        Copies.class,
        Destination.class,
        Fidelity.class,
        JobName.class,
        JobSheets.class,
        Media.class, /* have to support this somehow ... */
        MediaPrintableArea.class,
        OrientationRequested.class,
        PageRanges.class,
        RequestingUserName.class,
        SheetCollate.class,
        Sides.class,
    };
    // Upper bound used when advertising and validating the Copies attribute.
    private static int MAXCOPIES = 1000;
    // Media sizes this service claims to support, for all printers.
    private static final MediaSizeName mediaSizes[] = {
        MediaSizeName.NA_LETTER,
        MediaSizeName.TABLOID,
        MediaSizeName.LEDGER,
        MediaSizeName.NA_LEGAL,
        MediaSizeName.EXECUTIVE,
        MediaSizeName.ISO_A3,
        MediaSizeName.ISO_A4,
        MediaSizeName.ISO_A5,
        MediaSizeName.ISO_B4,
        MediaSizeName.ISO_B5,
    };
    private String printer;              // queue name, immutable after construction
    private PrinterName name;            // lazily created PrinterName wrapper
    private boolean isInvalid;           // set once the printer disappears
    transient private PrintServiceAttributeSet lastSet;   // last polled snapshot
    transient private ServiceNotifier notifier = null;    // guarded by 'this'
    /**
     * Creates a service for the named print queue.
     *
     * @param name the lpd/lp queue name; must not be null
     * @throws IllegalArgumentException if name is null
     */
    UnixPrintService(String name) {
        if (name == null) {
            throw new IllegalArgumentException("null printer name");
        }
        printer = name;
        isInvalid = false;
    }
    /** Marks this service invalid (e.g. the queue was removed from the host). */
    public void invalidateService() {
        isInvalid = true;
    }
    /** Returns the print queue name this service was constructed with. */
    public String getName() {
        return printer;
    }
private PrinterName getPrinterName() {
if (name == null) {
name = new PrinterName(printer, null);
}
return name;
}
    /**
     * Asks a System V spooler whether the queue accepts jobs by parsing
     * the first output line of "lpstat -a &lt;printer&gt;".
     * Returns NOT_ACCEPTING_JOBS when the command fails, produces no
     * output, or the line does not match an "accepting requests" form.
     */
    private PrinterIsAcceptingJobs getPrinterIsAcceptingJobsSysV() {
        String command = "/usr/bin/lpstat -a " + printer;
        String results[]= UnixPrintServiceLookup.execCmd(command);
        if (results != null && results.length > 0) {
            if (results[0].startsWith(printer + " accepting requests")) {
                return PrinterIsAcceptingJobs.ACCEPTING_JOBS;
            }
            else if (results[0].startsWith(printer)) {
                /* As well as "myprinter accepting requests", look for
                 * "myprinter@somehost accepting requests".
                 */
                int index = printer.length();
                String str = results[0];
                // must say "accepting" and must NOT say "not accepting"
                if (str.length() > index &&
                    str.charAt(index) == '@' &&
                    str.indexOf(" accepting requests", index) > 0 &&
                    str.indexOf(" not accepting requests", index) == -1) {
                    return PrinterIsAcceptingJobs.ACCEPTING_JOBS;
                }
            }
        }
        return PrinterIsAcceptingJobs.NOT_ACCEPTING_JOBS ;
    }
private PrinterIsAcceptingJobs getPrinterIsAcceptingJobsBSD() {
if (UnixPrintServiceLookup.cmdIndex ==
UnixPrintServiceLookup.UNINITIALIZED) {
UnixPrintServiceLookup.cmdIndex =
UnixPrintServiceLookup.getBSDCommandIndex();
}
String command = "/usr/sbin/lpc status " + printer
+ lpcStatusCom[UnixPrintServiceLookup.cmdIndex];
String results[]= UnixPrintServiceLookup.execCmd(command);
if (results != null && results.length > 0) {
if (UnixPrintServiceLookup.cmdIndex ==
UnixPrintServiceLookup.BSD_LPD_NG) {
if (results[0].startsWith("enabled enabled")) {
return PrinterIsAcceptingJobs.ACCEPTING_JOBS ;
}
} else {
if ((results[1].trim().startsWith("queuing is enabled") &&
results[2].trim().startsWith("printing is enabled")) ||
(results.length >= 4 &&
results[2].trim().startsWith("queuing is enabled") &&
results[3].trim().startsWith("printing is enabled"))) {
return PrinterIsAcceptingJobs.ACCEPTING_JOBS ;
}
}
}
return PrinterIsAcceptingJobs.NOT_ACCEPTING_JOBS ;
}
private PrinterIsAcceptingJobs getPrinterIsAcceptingJobs() {
if (UnixPrintServiceLookup.isSysV()) {
return getPrinterIsAcceptingJobsSysV();
} else if (UnixPrintServiceLookup.isBSD()) {
return getPrinterIsAcceptingJobsBSD();
} else {
return PrinterIsAcceptingJobs.ACCEPTING_JOBS;
}
}
private PrinterState getPrinterState() {
if (isInvalid) {
return PrinterState.STOPPED;
} else {
return null;
}
}
private PrinterStateReasons getPrinterStateReasons() {
if (isInvalid) {
PrinterStateReasons psr = new PrinterStateReasons();
psr.put(PrinterStateReason.SHUTDOWN, Severity.ERROR);
return psr;
} else {
return null;
}
}
private QueuedJobCount getQueuedJobCountSysV() {
String command = "/usr/bin/lpstat -R " + printer;
String results[]= UnixPrintServiceLookup.execCmd(command);
int qlen = (results == null) ? 0 : results.length;
return new QueuedJobCount(qlen);
}
private QueuedJobCount getQueuedJobCountBSD() {
if (UnixPrintServiceLookup.cmdIndex ==
UnixPrintServiceLookup.UNINITIALIZED) {
UnixPrintServiceLookup.cmdIndex =
UnixPrintServiceLookup.getBSDCommandIndex();
}
int qlen = 0;
String command = "/usr/sbin/lpc status " + printer
+ lpcQueueCom[UnixPrintServiceLookup.cmdIndex];
String results[] = UnixPrintServiceLookup.execCmd(command);
if (results != null && results.length > 0) {
String queued;
if (UnixPrintServiceLookup.cmdIndex ==
UnixPrintServiceLookup.BSD_LPD_NG) {
queued = results[0];
} else {
queued = results[3].trim();
if (queued.startsWith("no")) {
return new QueuedJobCount(0);
} else {
queued = queued.substring(0, queued.indexOf(' '));
}
}
try {
qlen = Integer.parseInt(queued);
} catch (NumberFormatException e) {
}
}
return new QueuedJobCount(qlen);
}
private QueuedJobCount getQueuedJobCount() {
if (UnixPrintServiceLookup.isSysV()) {
return getQueuedJobCountSysV();
} else if (UnixPrintServiceLookup.isBSD()) {
return getQueuedJobCountBSD();
} else {
return new QueuedJobCount(0);
}
}
private PrintServiceAttributeSet getSysVServiceAttributes() {
PrintServiceAttributeSet attrs = new HashPrintServiceAttributeSet();
attrs.add(getQueuedJobCountSysV());
attrs.add(getPrinterIsAcceptingJobsSysV());
return attrs;
}
private PrintServiceAttributeSet getBSDServiceAttributes() {
PrintServiceAttributeSet attrs = new HashPrintServiceAttributeSet();
attrs.add(getQueuedJobCountBSD());
attrs.add(getPrinterIsAcceptingJobsBSD());
return attrs;
}
private boolean isSupportedCopies(Copies copies) {
int numCopies = copies.getValue();
return (numCopies > 0 && numCopies < MAXCOPIES);
}
private boolean isSupportedMedia(MediaSizeName msn) {
for (int i=0; i<mediaSizes.length; i++) {
if (msn.equals(mediaSizes[i])) {
return true;
}
}
return false;
}
    /**
     * Creates a new print job bound to this service.
     *
     * @throws SecurityException if a security manager is installed and the
     *         caller lacks the queuePrintJob runtime permission
     */
    public DocPrintJob createPrintJob() {
      SecurityManager security = System.getSecurityManager();
      if (security != null) {
        security.checkPrintJobAccess();
      }
        return new UnixPrintJob(this);
    }
private PrintServiceAttributeSet getDynamicAttributes() {
if (UnixPrintServiceLookup.isSysV()) {
return getSysVServiceAttributes();
} else {
return getBSDServiceAttributes();
}
}
public PrintServiceAttributeSet getUpdatedAttributes() {
PrintServiceAttributeSet currSet = getDynamicAttributes();
if (lastSet == null) {
lastSet = currSet;
return AttributeSetUtilities.unmodifiableView(currSet);
} else {
PrintServiceAttributeSet updates =
new HashPrintServiceAttributeSet();
Attribute []attrs = currSet.toArray();
Attribute attr;
for (int i=0; i<attrs.length; i++) {
attr = attrs[i];
if (!lastSet.containsValue(attr)) {
updates.add(attr);
}
}
lastSet = currSet;
return AttributeSetUtilities.unmodifiableView(updates);
}
}
    /** Wakes the notifier thread (if any) so it re-polls and fires events. */
    public void wakeNotifier() {
        synchronized (this) {
            if (notifier != null) {
                notifier.wake();
            }
        }
    }
    /**
     * Registers a listener for PrintServiceAttribute events. The first
     * listener lazily starts the ServiceNotifier polling thread; null
     * listeners are silently ignored. Guarded by 'this'.
     */
    public void addPrintServiceAttributeListener(
                                 PrintServiceAttributeListener listener) {
        synchronized (this) {
            if (listener == null) {
                return;
            }
            if (notifier == null) {
                notifier = new ServiceNotifier(this);
            }
            notifier.addListener(listener);
        }
    }
    /**
     * Unregisters a listener; stops and drops the notifier thread when the
     * last listener is removed. Guarded by 'this'.
     */
    public void removePrintServiceAttributeListener(
                                  PrintServiceAttributeListener listener) {
        synchronized (this) {
            if (listener == null || notifier == null ) {
                return;
            }
            notifier.removeListener(listener);
            if (notifier.isEmpty()) {
                notifier.stopNotifier();
                notifier = null;
            }
        }
    }
    /**
     * Returns the current value of one service attribute, or null for
     * categories this service does not report. PrinterState and
     * PrinterStateReasons are only non-null once the service has been
     * invalidated; the others are computed on demand.
     *
     * @throws NullPointerException if category is null
     * @throws IllegalArgumentException if category is not a
     *         PrintServiceAttribute subclass
     */
    public <T extends PrintServiceAttribute>
        T getAttribute(Class<T> category)
    {
        if (category == null) {
            throw new NullPointerException("category");
        }
        if (!(PrintServiceAttribute.class.isAssignableFrom(category))) {
            throw new IllegalArgumentException("Not a PrintServiceAttribute");
        }
        // the casts below are unchecked but safe: each branch returns the
        // exact attribute type matched by its category check
        if (category == PrinterName.class) {
            return (T)getPrinterName();
        } else if (category == PrinterState.class) {
            return (T)getPrinterState();
        } else if (category == PrinterStateReasons.class) {
            return (T)getPrinterStateReasons();
        } else if (category == QueuedJobCount.class) {
            return (T)getQueuedJobCount();
        } else if (category == PrinterIsAcceptingJobs.class) {
            return (T)getPrinterIsAcceptingJobs();
        } else {
            return null;
        }
    }
    /**
     * Returns an unmodifiable snapshot of all service attributes this
     * service can report. PrinterState/PrinterStateReasons are included
     * only when non-null (i.e. after invalidation).
     */
    public PrintServiceAttributeSet getAttributes() {
        PrintServiceAttributeSet attrs = new  HashPrintServiceAttributeSet();
        attrs.add(getPrinterName());
        attrs.add(getPrinterIsAcceptingJobs());
        PrinterState prnState = getPrinterState();
        if (prnState != null) {
            attrs.add(prnState);
        }
        PrinterStateReasons prnStateReasons = getPrinterStateReasons();
        if (prnStateReasons != null) {
            attrs.add(prnStateReasons);
        }
        attrs.add(getQueuedJobCount());
        return AttributeSetUtilities.unmodifiableView(attrs);
    }
    /**
     * Builds the static supportedDocFlavors array. When the host charset
     * is not one of the always-supported text encodings, the host-charset
     * text flavors are appended after the base list. The array is built
     * locally and assigned once; concurrent first calls may race but each
     * produces an equivalent result, so the race is benign.
     */
    private void initSupportedDocFlavors() {
        String hostEnc = DocFlavor.hostEncoding.toLowerCase(Locale.ENGLISH);
        if (!hostEnc.equals("utf-8") && !hostEnc.equals("utf-16") &&
            !hostEnc.equals("utf-16be") && !hostEnc.equals("utf-16le") &&
            !hostEnc.equals("us-ascii")) {
            int len = supportedDocFlavorsInit.length;
            DocFlavor[] flavors =
                new DocFlavor[len + supportedHostDocFlavors.length];
            // base flavors first, then the host-charset flavors at the tail
            System.arraycopy(supportedHostDocFlavors, 0, flavors,
                             len, supportedHostDocFlavors.length);
            System.arraycopy(supportedDocFlavorsInit, 0, flavors, 0, len);
            supportedDocFlavors = flavors;
        } else {
            supportedDocFlavors = supportedDocFlavorsInit;
        }
    }
public DocFlavor[] getSupportedDocFlavors() {
if (supportedDocFlavors == null) {
initSupportedDocFlavors();
}
int len = supportedDocFlavors.length;
DocFlavor[] flavors = new DocFlavor[len];
System.arraycopy(supportedDocFlavors, 0, flavors, 0, len);
return flavors;
}
public boolean isDocFlavorSupported(DocFlavor flavor) {
if (supportedDocFlavors == null) {
initSupportedDocFlavors();
}
for (int f=0; f<supportedDocFlavors.length; f++) {
if (flavor.equals(supportedDocFlavors[f])) {
return true;
}
}
return false;
}
public Class[] getSupportedAttributeCategories() {
int totalCats = otherAttrCats.length;
Class [] cats = new Class[totalCats];
System.arraycopy(otherAttrCats, 0, cats, 0, otherAttrCats.length);
return cats;
}
public boolean
isAttributeCategorySupported(Class<? extends Attribute> category)
{
if (category == null) {
throw new NullPointerException("null category");
}
if (!(Attribute.class.isAssignableFrom(category))) {
throw new IllegalArgumentException(category +
" is not an Attribute");
}
for (int i=0;i<otherAttrCats.length;i++) {
if (category == otherAttrCats[i]) {
return true;
}
}
return false;
}
    /* return defaults for all attributes for which there is a default
     * value
     */
    /**
     * Returns the default value for a supported attribute category, or
     * null for unsupported categories. Media and MediaPrintableArea
     * defaults depend on the default locale's country (Letter for
     * US/Canada/unset, A4 elsewhere).
     *
     * @throws NullPointerException if category is null
     * @throws IllegalArgumentException if category is not an Attribute class
     */
    public Object
        getDefaultAttributeValue(Class<? extends Attribute> category)
    {
        if (category == null) {
            throw new NullPointerException("null category");
        }
        if (!Attribute.class.isAssignableFrom(category)) {
            throw new IllegalArgumentException(category +
                                             " is not an Attribute");
        }
        if (!isAttributeCategorySupported(category)) {
            return null;
        }
        if (category == Copies.class) {
            return new Copies(1);
        } else if (category == Chromaticity.class) {
            return Chromaticity.COLOR;
        } else if (category == Destination.class) {
            try {
                return new Destination((new File("out.ps")).toURI());
            } catch (SecurityException se) {
                // sandboxed: fall back to a relative file: URI
                try {
                    return new Destination(new URI("file:out.ps"));
                } catch (URISyntaxException e) {
                    return null;
                }
            }
        } else if (category == Fidelity.class) {
            return Fidelity.FIDELITY_FALSE;
        } else if (category == JobName.class) {
            return new JobName("Java Printing", null);
        } else if (category == JobSheets.class) {
            return JobSheets.STANDARD;
        } else if (category == Media.class) {
            String defaultCountry = Locale.getDefault().getCountry();
            if (defaultCountry != null &&
                (defaultCountry.equals("") ||
                 defaultCountry.equals(Locale.US.getCountry()) ||
                 defaultCountry.equals(Locale.CANADA.getCountry()))) {
                return MediaSizeName.NA_LETTER;
            } else {
                return MediaSizeName.ISO_A4;
            }
        } else if (category == MediaPrintableArea.class) {
            String defaultCountry = Locale.getDefault().getCountry();
            float iw, ih;
            // printable area = page size minus a 0.25" margin on each side
            if (defaultCountry != null &&
                (defaultCountry.equals("") ||
                 defaultCountry.equals(Locale.US.getCountry()) ||
                 defaultCountry.equals(Locale.CANADA.getCountry()))) {
                iw = MediaSize.NA.LETTER.getX(Size2DSyntax.INCH) - 0.5f;
                ih = MediaSize.NA.LETTER.getY(Size2DSyntax.INCH) - 0.5f;
            } else {
                iw = MediaSize.ISO.A4.getX(Size2DSyntax.INCH) - 0.5f;
                ih = MediaSize.ISO.A4.getY(Size2DSyntax.INCH) - 0.5f;
            }
            return new MediaPrintableArea(0.25f, 0.25f, iw, ih,
                                          MediaPrintableArea.INCH);
        } else if (category == OrientationRequested.class) {
            return OrientationRequested.PORTRAIT;
        } else if (category == PageRanges.class) {
            return new PageRanges(1, Integer.MAX_VALUE);
        } else if (category == RequestingUserName.class) {
            String userName = "";
            try {
                userName = System.getProperty("user.name", "");
            } catch (SecurityException se) {
                // sandboxed: leave the user name empty
            }
            return new RequestingUserName(userName, null);
        } else if (category == SheetCollate.class) {
            return SheetCollate.UNCOLLATED;
        } else if (category == Sides.class) {
            return Sides.ONE_SIDED;
        } else
            return null;
    }
private boolean isAutoSense(DocFlavor flavor) {
if (flavor.equals(DocFlavor.BYTE_ARRAY.AUTOSENSE) ||
flavor.equals(DocFlavor.INPUT_STREAM.AUTOSENSE) ||
flavor.equals(DocFlavor.URL.AUTOSENSE)) {
return true;
}
else {
return false;
}
}
    /**
     * Returns the supported values for an attribute category, optionally
     * narrowed by doc flavor and by other attributes already chosen.
     * Returns either an array of values or a single object describing the
     * supported range (per the PrintService contract), or null when the
     * category is unsupported or not applicable to the flavor.
     *
     * @throws NullPointerException if category is null
     * @throws IllegalArgumentException if category is not an Attribute
     *         class, or flavor is non-null and unsupported
     */
    public Object
        getSupportedAttributeValues(Class<? extends Attribute> category,
                                    DocFlavor flavor,
                                    AttributeSet attributes)
    {
        if (category == null) {
            throw new NullPointerException("null category");
        }
        if (!Attribute.class.isAssignableFrom(category)) {
            throw new IllegalArgumentException(category +
                                             " does not implement Attribute");
        }
        if (flavor != null) {
            if (!isDocFlavorSupported(flavor)) {
                throw new IllegalArgumentException(flavor +
                                               " is an unsupported flavor");
            // no attribute can be specified for autosensed data
            } else if (isAutoSense(flavor)) {
                return null;
            }
        }
        if (!isAttributeCategorySupported(category)) {
            return null;
        }
        if (category == Chromaticity.class) {
            // only meaningful for content this service renders itself
            if (flavor == null || isServiceFormattedFlavor(flavor)) {
                Chromaticity[]arr = new Chromaticity[1];
                arr[0] = Chromaticity.COLOR;
                return (arr);
            } else {
                return null;
            }
        } else if (category == Destination.class) {
            try {
                return new Destination((new File("out.ps")).toURI());
            } catch (SecurityException se) {
                // sandboxed: fall back to a relative file: URI
                try {
                    return new Destination(new URI("file:out.ps"));
                } catch (URISyntaxException e) {
                    return null;
                }
            }
        } else if (category == JobName.class) {
            return new JobName("Java Printing", null);
        } else if (category == JobSheets.class) {
            JobSheets arr[] = new JobSheets[2];
            arr[0] = JobSheets.NONE;
            arr[1] = JobSheets.STANDARD;
            return arr;
        } else if (category == RequestingUserName.class) {
            String userName = "";
            try {
                userName = System.getProperty("user.name", "");
            } catch (SecurityException se) {
                // sandboxed: leave the user name empty
            }
            return new RequestingUserName(userName, null);
        } else if (category == OrientationRequested.class) {
            if (flavor == null || isServiceFormattedFlavor(flavor)) {
                OrientationRequested []arr = new OrientationRequested[3];
                arr[0] = OrientationRequested.PORTRAIT;
                arr[1] = OrientationRequested.LANDSCAPE;
                arr[2] = OrientationRequested.REVERSE_LANDSCAPE;
                return arr;
            } else {
                return null;
            }
        } else if ((category == Copies.class) ||
                   (category == CopiesSupported.class)) {
            // multiple copies are not supported for pre-formatted
            // PostScript, which is passed through untouched
            if (flavor == null ||
                !(flavor.equals(DocFlavor.INPUT_STREAM.POSTSCRIPT) ||
                  flavor.equals(DocFlavor.URL.POSTSCRIPT) ||
                  flavor.equals(DocFlavor.BYTE_ARRAY.POSTSCRIPT))) {
                return new CopiesSupported(1, MAXCOPIES);
            } else {
                return null;
            }
        } else if (category == Media.class) {
            Media []arr = new Media[mediaSizes.length];
            System.arraycopy(mediaSizes, 0, arr, 0, mediaSizes.length);
            return arr;
        } else if (category == Fidelity.class) {
            Fidelity []arr = new Fidelity[2];
            arr[0] = Fidelity.FIDELITY_FALSE;
            arr[1] = Fidelity.FIDELITY_TRUE;
            return arr;
        } else if (category == MediaPrintableArea.class) {
            /* The code below implements the behaviour that if no Media or
             * MediaSize attribute is specified, return an array of
             * MediaPrintableArea, one for each supported Media.
             * If a MediaSize is specified, return a MPA consistent for that,
             * and if a Media is specified locate its MediaSize and return
             * its MPA, and if none is found, return an MPA for the default
             * Media for this service.
             */
            if (attributes == null) {
                return getAllPrintableAreas();
            }
            MediaSize mediaSize = (MediaSize)attributes.get(MediaSize.class);
            Media media = (Media)attributes.get(Media.class);
            MediaPrintableArea []arr = new MediaPrintableArea[1];
            if (mediaSize == null) {
                if (media instanceof MediaSizeName) {
                    MediaSizeName msn = (MediaSizeName)media;
                    mediaSize = MediaSize.getMediaSizeForName(msn);
                    if (mediaSize == null) {
                        /* try to get a size from the default media */
                        media = (Media)getDefaultAttributeValue(Media.class);
                        if (media instanceof MediaSizeName) {
                            msn = (MediaSizeName)media;
                            mediaSize = MediaSize.getMediaSizeForName(msn);
                        }
                        if (mediaSize == null) {
                            /* shouldn't happen, return a default */
                            arr[0] = new MediaPrintableArea(0.25f, 0.25f,
                                                            8f, 10.5f,
                                                            MediaSize.INCH);
                            return arr;
                        }
                    }
                } else {
                    return getAllPrintableAreas();
                }
            }
            /* If reach here MediaSize is non-null */
            assert mediaSize != null;
            // printable area = media size minus 0.25" margins on each side
            arr[0] = new MediaPrintableArea(0.25f, 0.25f,
                                mediaSize.getX(MediaSize.INCH)-0.5f,
                                mediaSize.getY(MediaSize.INCH)-0.5f,
                                MediaSize.INCH);
            return arr;
        } else if (category == PageRanges.class) {
            // page selection only works for content this service paginates
            if (flavor == null ||
                flavor.equals(DocFlavor.SERVICE_FORMATTED.PAGEABLE) ||
                flavor.equals(DocFlavor.SERVICE_FORMATTED.PRINTABLE)) {
                PageRanges []arr = new PageRanges[1];
                arr[0] = new PageRanges(1, Integer.MAX_VALUE);
                return arr;
            } else {
                return null;
            }
        } else if (category == SheetCollate.class) {
            if (flavor == null ||
                flavor.equals(DocFlavor.SERVICE_FORMATTED.PAGEABLE) ||
                flavor.equals(DocFlavor.SERVICE_FORMATTED.PRINTABLE)) {
                SheetCollate []arr = new SheetCollate[2];
                arr[0] = SheetCollate.UNCOLLATED;
                arr[1] = SheetCollate.COLLATED;
                return arr;
            } else {
                return null;
            }
        } else if (category == Sides.class) {
            if (flavor == null ||
                flavor.equals(DocFlavor.SERVICE_FORMATTED.PAGEABLE) ||
                flavor.equals(DocFlavor.SERVICE_FORMATTED.PRINTABLE)) {
                Sides []arr = new Sides[3];
                arr[0] = Sides.ONE_SIDED;
                arr[1] = Sides.TWO_SIDED_LONG_EDGE;
                arr[2] = Sides.TWO_SIDED_SHORT_EDGE;
                return arr;
            } else {
                return null;
            }
        } else {
            return null;
        }
    }
private static MediaPrintableArea[] mpas = null;
private MediaPrintableArea[] getAllPrintableAreas() {
if (mpas == null) {
Media[] media = (Media[])getSupportedAttributeValues(Media.class,
null, null);
mpas = new MediaPrintableArea[media.length];
for (int i=0; i< mpas.length; i++) {
if (media[i] instanceof MediaSizeName) {
MediaSizeName msn = (MediaSizeName)media[i];
MediaSize mediaSize = MediaSize.getMediaSizeForName(msn);
if (mediaSize == null) {
mpas[i] = (MediaPrintableArea)
getDefaultAttributeValue(MediaPrintableArea.class);
} else {
mpas[i] = new MediaPrintableArea(0.25f, 0.25f,
mediaSize.getX(MediaSize.INCH)-0.5f,
mediaSize.getY(MediaSize.INCH)-0.5f,
MediaSize.INCH);
}
}
}
}
MediaPrintableArea[] mpasCopy = new MediaPrintableArea[mpas.length];
System.arraycopy(mpas, 0, mpasCopy, 0, mpas.length);
return mpasCopy;
}
/* Is this one of the flavors that this service explicitly
* generates postscript for, and so can control how it is rendered?
*/
private boolean isServiceFormattedFlavor(DocFlavor flavor) {
return
flavor.equals(DocFlavor.SERVICE_FORMATTED.PAGEABLE) ||
flavor.equals(DocFlavor.SERVICE_FORMATTED.PRINTABLE) ||
flavor.equals(DocFlavor.BYTE_ARRAY.GIF) ||
flavor.equals(DocFlavor.INPUT_STREAM.GIF) ||
flavor.equals(DocFlavor.URL.GIF) ||
flavor.equals(DocFlavor.BYTE_ARRAY.JPEG) ||
flavor.equals(DocFlavor.INPUT_STREAM.JPEG) ||
flavor.equals(DocFlavor.URL.JPEG) ||
flavor.equals(DocFlavor.BYTE_ARRAY.PNG) ||
flavor.equals(DocFlavor.INPUT_STREAM.PNG) ||
flavor.equals(DocFlavor.URL.PNG);
}
    /**
     * Reports whether a specific attribute value is supported, optionally
     * in combination with a doc flavor. Structure note: most branches only
     * return false on a disqualifying condition and otherwise fall through
     * to the final "return true" — a supported category with no branch of
     * its own is accepted unconditionally.
     *
     * @throws NullPointerException if attr is null
     * @throws IllegalArgumentException if flavor is non-null and unsupported
     */
    public boolean isAttributeValueSupported(Attribute attr,
                                             DocFlavor flavor,
                                             AttributeSet attributes) {
        if (attr == null) {
            throw new NullPointerException("null attribute");
        }
        if (flavor != null) {
            if (!isDocFlavorSupported(flavor)) {
                throw new IllegalArgumentException(flavor +
                                               " is an unsupported flavor");
            // no attribute is supported for autosensed data
            } else if (isAutoSense(flavor)) {
                return false;
            }
        }
        Class category = attr.getCategory();
        if (!isAttributeCategorySupported(category)) {
            return false;
        }
        else if (attr.getCategory() == Chromaticity.class) {
            // only COLOR, and only for content this service renders itself
            if (flavor == null || isServiceFormattedFlavor(flavor)) {
                return attr == Chromaticity.COLOR;
            } else {
                return false;
            }
        }
        else if (attr.getCategory() == Copies.class) {
            // copies are not supported for pre-formatted PostScript
            return (flavor == null ||
                   !(flavor.equals(DocFlavor.INPUT_STREAM.POSTSCRIPT) ||
                   flavor.equals(DocFlavor.URL.POSTSCRIPT) ||
                   flavor.equals(DocFlavor.BYTE_ARRAY.POSTSCRIPT))) &&
                isSupportedCopies((Copies)attr);
        } else if (attr.getCategory() == Destination.class) {
            // only non-empty file: destinations are accepted
            URI uri = ((Destination)attr).getURI();
                if ("file".equals(uri.getScheme()) &&
                    !(uri.getSchemeSpecificPart().equals(""))) {
                return true;
            } else {
            return false;
            }
        } else if (attr.getCategory() == Media.class) {
            if (attr instanceof MediaSizeName) {
                return isSupportedMedia((MediaSizeName)attr);
            } else {
                return false;
            }
        } else if (attr.getCategory() == OrientationRequested.class) {
            // REVERSE_PORTRAIT is never supported; other orientations only
            // for service-formatted content
            if (attr == OrientationRequested.REVERSE_PORTRAIT ||
                (flavor != null) &&
                !isServiceFormattedFlavor(flavor)) {
                return false;
            }
        } else if (attr.getCategory() == PageRanges.class) {
            if (flavor != null &&
                !(flavor.equals(DocFlavor.SERVICE_FORMATTED.PAGEABLE) ||
                flavor.equals(DocFlavor.SERVICE_FORMATTED.PRINTABLE))) {
                return false;
            }
        } else if (attr.getCategory() == SheetCollate.class) {
            if (flavor != null &&
                !(flavor.equals(DocFlavor.SERVICE_FORMATTED.PAGEABLE) ||
                flavor.equals(DocFlavor.SERVICE_FORMATTED.PRINTABLE))) {
                return false;
            }
        } else if (attr.getCategory() == Sides.class) {
            if (flavor != null &&
                !(flavor.equals(DocFlavor.SERVICE_FORMATTED.PAGEABLE) ||
                flavor.equals(DocFlavor.SERVICE_FORMATTED.PRINTABLE))) {
                return false;
            }
        }
        // fall-through: value accepted for its (supported) category
        return true;
    }
public AttributeSet getUnsupportedAttributes(DocFlavor flavor,
AttributeSet attributes) {
if (flavor != null && !isDocFlavorSupported(flavor)) {
throw new IllegalArgumentException("flavor " + flavor +
"is not supported");
}
if (attributes == null) {
return null;
}
Attribute attr;
AttributeSet unsupp = new HashAttributeSet();
Attribute []attrs = attributes.toArray();
for (int i=0; i<attrs.length; i++) {
try {
attr = attrs[i];
if (!isAttributeCategorySupported(attr.getCategory())) {
unsupp.add(attr);
} else if (!isAttributeValueSupported(attr, flavor,
attributes)) {
unsupp.add(attr);
}
} catch (ClassCastException e) {
}
}
if (unsupp.isEmpty()) {
return null;
} else {
return unsupp;
}
}
    /**
     * Returns the factory for service-specific UI components.
     * Always {@code null}: this service supplies no custom print-dialog UI.
     */
    public ServiceUIFactory getServiceUIFactory() {
        return null;
    }
public String toString() {
return "Unix Printer : " + getName();
}
public boolean equals(Object obj) {
return (obj == this ||
(obj instanceof UnixPrintService &&
((UnixPrintService)obj).getName().equals(getName())));
}
public int hashCode() {
return this.getClass().hashCode()+getName().hashCode();
}
    /**
     * Reports whether this service is backed by the given print-job
     * implementation class; only {@code sun.print.PSPrinterJob} (the
     * PostScript job implementation) is recognized.
     */
    public boolean usesClass(Class c) {
        return (c == sun.print.PSPrinterJob.class);
    }
}
|
googleapis/google-cloud-java | 37,722 | java-maps-area-insights/proto-google-maps-area-insights-v1/src/main/java/com/google/maps/areainsights/v1/ComputeInsightsRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/maps/areainsights/v1/area_insights_service.proto
// Protobuf Java Version: 3.25.8
package com.google.maps.areainsights.v1;
/**
*
*
* <pre>
* Request for the ComputeInsights RPC.
* </pre>
*
* Protobuf type {@code google.maps.areainsights.v1.ComputeInsightsRequest}
*/
// NOTE(review): protoc-generated message (file header says "DO NOT EDIT!").
// Code below is kept exactly as generated — changes belong in
// area_insights_service.proto — only explanatory comments are added.
public final class ComputeInsightsRequest extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.maps.areainsights.v1.ComputeInsightsRequest)
    ComputeInsightsRequestOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use ComputeInsightsRequest.newBuilder() to construct.
  private ComputeInsightsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private ComputeInsightsRequest() {
    insights_ = java.util.Collections.emptyList();
  }
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ComputeInsightsRequest();
  }
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.maps.areainsights.v1.AreaInsightsServiceProto
        .internal_static_google_maps_areainsights_v1_ComputeInsightsRequest_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.maps.areainsights.v1.AreaInsightsServiceProto
        .internal_static_google_maps_areainsights_v1_ComputeInsightsRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.maps.areainsights.v1.ComputeInsightsRequest.class,
            com.google.maps.areainsights.v1.ComputeInsightsRequest.Builder.class);
  }
  // Presence bits for optional fields: bit 0 marks whether `filter` is set.
  private int bitField0_;
  public static final int INSIGHTS_FIELD_NUMBER = 4;
  @SuppressWarnings("serial")
  // Repeated enum stored as raw wire numbers; view methods adapt to the enum type.
  private java.util.List<java.lang.Integer> insights_;
  private static final com.google.protobuf.Internal.ListAdapter.Converter<
          java.lang.Integer, com.google.maps.areainsights.v1.Insight>
      insights_converter_ =
          new com.google.protobuf.Internal.ListAdapter.Converter<
              java.lang.Integer, com.google.maps.areainsights.v1.Insight>() {
            public com.google.maps.areainsights.v1.Insight convert(java.lang.Integer from) {
              com.google.maps.areainsights.v1.Insight result =
                  com.google.maps.areainsights.v1.Insight.forNumber(from);
              // Unknown wire values map to UNRECOGNIZED rather than null.
              return result == null ? com.google.maps.areainsights.v1.Insight.UNRECOGNIZED : result;
            }
          };
  /**
   *
   *
   * <pre>
   * Required. Insights to compute. Currently only INSIGHT_COUNT and
   * INSIGHT_PLACES are supported.
   * </pre>
   *
   * <code>
   * repeated .google.maps.areainsights.v1.Insight insights = 4 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return A list containing the insights.
   */
  @java.lang.Override
  public java.util.List<com.google.maps.areainsights.v1.Insight> getInsightsList() {
    return new com.google.protobuf.Internal.ListAdapter<
        java.lang.Integer, com.google.maps.areainsights.v1.Insight>(insights_, insights_converter_);
  }
  /**
   *
   *
   * <pre>
   * Required. Insights to compute. Currently only INSIGHT_COUNT and
   * INSIGHT_PLACES are supported.
   * </pre>
   *
   * <code>
   * repeated .google.maps.areainsights.v1.Insight insights = 4 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return The count of insights.
   */
  @java.lang.Override
  public int getInsightsCount() {
    return insights_.size();
  }
  /**
   *
   *
   * <pre>
   * Required. Insights to compute. Currently only INSIGHT_COUNT and
   * INSIGHT_PLACES are supported.
   * </pre>
   *
   * <code>
   * repeated .google.maps.areainsights.v1.Insight insights = 4 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @param index The index of the element to return.
   * @return The insights at the given index.
   */
  @java.lang.Override
  public com.google.maps.areainsights.v1.Insight getInsights(int index) {
    return insights_converter_.convert(insights_.get(index));
  }
  /**
   *
   *
   * <pre>
   * Required. Insights to compute. Currently only INSIGHT_COUNT and
   * INSIGHT_PLACES are supported.
   * </pre>
   *
   * <code>
   * repeated .google.maps.areainsights.v1.Insight insights = 4 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return A list containing the enum numeric values on the wire for insights.
   */
  @java.lang.Override
  public java.util.List<java.lang.Integer> getInsightsValueList() {
    return insights_;
  }
  /**
   *
   *
   * <pre>
   * Required. Insights to compute. Currently only INSIGHT_COUNT and
   * INSIGHT_PLACES are supported.
   * </pre>
   *
   * <code>
   * repeated .google.maps.areainsights.v1.Insight insights = 4 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @param index The index of the value to return.
   * @return The enum numeric value on the wire of insights at the given index.
   */
  @java.lang.Override
  public int getInsightsValue(int index) {
    return insights_.get(index);
  }
  // Byte size of the packed insights payload; written by getSerializedSize()
  // and consumed by writeTo() for the length-delimited prefix.
  private int insightsMemoizedSerializedSize;
  public static final int FILTER_FIELD_NUMBER = 5;
  private com.google.maps.areainsights.v1.Filter filter_;
  /**
   *
   *
   * <pre>
   * Required. Insight filter.
   * </pre>
   *
   * <code>.google.maps.areainsights.v1.Filter filter = 5 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return Whether the filter field is set.
   */
  @java.lang.Override
  public boolean hasFilter() {
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   *
   *
   * <pre>
   * Required. Insight filter.
   * </pre>
   *
   * <code>.google.maps.areainsights.v1.Filter filter = 5 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return The filter.
   */
  @java.lang.Override
  public com.google.maps.areainsights.v1.Filter getFilter() {
    return filter_ == null ? com.google.maps.areainsights.v1.Filter.getDefaultInstance() : filter_;
  }
  /**
   *
   *
   * <pre>
   * Required. Insight filter.
   * </pre>
   *
   * <code>.google.maps.areainsights.v1.Filter filter = 5 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  @java.lang.Override
  public com.google.maps.areainsights.v1.FilterOrBuilder getFilterOrBuilder() {
    return filter_ == null ? com.google.maps.areainsights.v1.Filter.getDefaultInstance() : filter_;
  }
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Must run first: populates insightsMemoizedSerializedSize used below.
    getSerializedSize();
    if (getInsightsList().size() > 0) {
      // Field 4 written packed: tag 34 = (4 << 3) | wire type 2 (length-delimited).
      output.writeUInt32NoTag(34);
      output.writeUInt32NoTag(insightsMemoizedSerializedSize);
    }
    for (int i = 0; i < insights_.size(); i++) {
      output.writeEnumNoTag(insights_.get(i));
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(5, getFilter());
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    {
      int dataSize = 0;
      for (int i = 0; i < insights_.size(); i++) {
        dataSize += com.google.protobuf.CodedOutputStream.computeEnumSizeNoTag(insights_.get(i));
      }
      size += dataSize;
      if (!getInsightsList().isEmpty()) {
        // One byte for the packed-field tag plus the varint length prefix.
        size += 1;
        size += com.google.protobuf.CodedOutputStream.computeUInt32SizeNoTag(dataSize);
      }
      insightsMemoizedSerializedSize = dataSize;
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(5, getFilter());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.maps.areainsights.v1.ComputeInsightsRequest)) {
      return super.equals(obj);
    }
    com.google.maps.areainsights.v1.ComputeInsightsRequest other =
        (com.google.maps.areainsights.v1.ComputeInsightsRequest) obj;
    if (!insights_.equals(other.insights_)) return false;
    if (hasFilter() != other.hasFilter()) return false;
    if (hasFilter()) {
      if (!getFilter().equals(other.getFilter())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getInsightsCount() > 0) {
      hash = (37 * hash) + INSIGHTS_FIELD_NUMBER;
      hash = (53 * hash) + insights_.hashCode();
    }
    if (hasFilter()) {
      hash = (37 * hash) + FILTER_FIELD_NUMBER;
      hash = (53 * hash) + getFilter().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  public static com.google.maps.areainsights.v1.ComputeInsightsRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.maps.areainsights.v1.ComputeInsightsRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.maps.areainsights.v1.ComputeInsightsRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.maps.areainsights.v1.ComputeInsightsRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.maps.areainsights.v1.ComputeInsightsRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.maps.areainsights.v1.ComputeInsightsRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.maps.areainsights.v1.ComputeInsightsRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.maps.areainsights.v1.ComputeInsightsRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.maps.areainsights.v1.ComputeInsightsRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.maps.areainsights.v1.ComputeInsightsRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.maps.areainsights.v1.ComputeInsightsRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.maps.areainsights.v1.ComputeInsightsRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(
      com.google.maps.areainsights.v1.ComputeInsightsRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * Request for the ComputeInsights RPC.
   * </pre>
   *
   * Protobuf type {@code google.maps.areainsights.v1.ComputeInsightsRequest}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.maps.areainsights.v1.ComputeInsightsRequest)
      com.google.maps.areainsights.v1.ComputeInsightsRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.maps.areainsights.v1.AreaInsightsServiceProto
          .internal_static_google_maps_areainsights_v1_ComputeInsightsRequest_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.maps.areainsights.v1.AreaInsightsServiceProto
          .internal_static_google_maps_areainsights_v1_ComputeInsightsRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.maps.areainsights.v1.ComputeInsightsRequest.class,
              com.google.maps.areainsights.v1.ComputeInsightsRequest.Builder.class);
    }
    // Construct using com.google.maps.areainsights.v1.ComputeInsightsRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getFilterFieldBuilder();
      }
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      insights_ = java.util.Collections.emptyList();
      bitField0_ = (bitField0_ & ~0x00000001);
      filter_ = null;
      if (filterBuilder_ != null) {
        filterBuilder_.dispose();
        filterBuilder_ = null;
      }
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.maps.areainsights.v1.AreaInsightsServiceProto
          .internal_static_google_maps_areainsights_v1_ComputeInsightsRequest_descriptor;
    }
    @java.lang.Override
    public com.google.maps.areainsights.v1.ComputeInsightsRequest getDefaultInstanceForType() {
      return com.google.maps.areainsights.v1.ComputeInsightsRequest.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.maps.areainsights.v1.ComputeInsightsRequest build() {
      com.google.maps.areainsights.v1.ComputeInsightsRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.maps.areainsights.v1.ComputeInsightsRequest buildPartial() {
      com.google.maps.areainsights.v1.ComputeInsightsRequest result =
          new com.google.maps.areainsights.v1.ComputeInsightsRequest(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    private void buildPartialRepeatedFields(
        com.google.maps.areainsights.v1.ComputeInsightsRequest result) {
      if (((bitField0_ & 0x00000001) != 0)) {
        // Freeze the list so the built message is immutable.
        insights_ = java.util.Collections.unmodifiableList(insights_);
        bitField0_ = (bitField0_ & ~0x00000001);
      }
      result.insights_ = insights_;
    }
    private void buildPartial0(com.google.maps.areainsights.v1.ComputeInsightsRequest result) {
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      // Builder bit 1 (filter set) maps to message bit 0.
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.filter_ = filterBuilder_ == null ? filter_ : filterBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      result.bitField0_ |= to_bitField0_;
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.maps.areainsights.v1.ComputeInsightsRequest) {
        return mergeFrom((com.google.maps.areainsights.v1.ComputeInsightsRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    public Builder mergeFrom(com.google.maps.areainsights.v1.ComputeInsightsRequest other) {
      if (other == com.google.maps.areainsights.v1.ComputeInsightsRequest.getDefaultInstance())
        return this;
      if (!other.insights_.isEmpty()) {
        if (insights_.isEmpty()) {
          // Share the other message's (immutable) list until a mutation occurs.
          insights_ = other.insights_;
          bitField0_ = (bitField0_ & ~0x00000001);
        } else {
          ensureInsightsIsMutable();
          insights_.addAll(other.insights_);
        }
        onChanged();
      }
      if (other.hasFilter()) {
        mergeFilter(other.getFilter());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 32:
              // Field 4, unpacked: a single varint enum value.
              {
                int tmpRaw = input.readEnum();
                ensureInsightsIsMutable();
                insights_.add(tmpRaw);
                break;
              } // case 32
            case 34:
              // Field 4, packed: length-delimited run of varint enum values.
              {
                int length = input.readRawVarint32();
                int oldLimit = input.pushLimit(length);
                while (input.getBytesUntilLimit() > 0) {
                  int tmpRaw = input.readEnum();
                  ensureInsightsIsMutable();
                  insights_.add(tmpRaw);
                }
                input.popLimit(oldLimit);
                break;
              } // case 34
            case 42:
              // Field 5: filter submessage.
              {
                input.readMessage(getFilterFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 42
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Builder bits: bit 0 = insights list is mutable/owned; bit 1 = filter set.
    private int bitField0_;
    private java.util.List<java.lang.Integer> insights_ = java.util.Collections.emptyList();
    private void ensureInsightsIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        // Copy-on-write: replace the shared/immutable list before mutating.
        insights_ = new java.util.ArrayList<java.lang.Integer>(insights_);
        bitField0_ |= 0x00000001;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. Insights to compute. Currently only INSIGHT_COUNT and
     * INSIGHT_PLACES are supported.
     * </pre>
     *
     * <code>
     * repeated .google.maps.areainsights.v1.Insight insights = 4 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return A list containing the insights.
     */
    public java.util.List<com.google.maps.areainsights.v1.Insight> getInsightsList() {
      return new com.google.protobuf.Internal.ListAdapter<
          java.lang.Integer, com.google.maps.areainsights.v1.Insight>(
          insights_, insights_converter_);
    }
    /**
     *
     *
     * <pre>
     * Required. Insights to compute. Currently only INSIGHT_COUNT and
     * INSIGHT_PLACES are supported.
     * </pre>
     *
     * <code>
     * repeated .google.maps.areainsights.v1.Insight insights = 4 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return The count of insights.
     */
    public int getInsightsCount() {
      return insights_.size();
    }
    /**
     *
     *
     * <pre>
     * Required. Insights to compute. Currently only INSIGHT_COUNT and
     * INSIGHT_PLACES are supported.
     * </pre>
     *
     * <code>
     * repeated .google.maps.areainsights.v1.Insight insights = 4 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @param index The index of the element to return.
     * @return The insights at the given index.
     */
    public com.google.maps.areainsights.v1.Insight getInsights(int index) {
      return insights_converter_.convert(insights_.get(index));
    }
    /**
     *
     *
     * <pre>
     * Required. Insights to compute. Currently only INSIGHT_COUNT and
     * INSIGHT_PLACES are supported.
     * </pre>
     *
     * <code>
     * repeated .google.maps.areainsights.v1.Insight insights = 4 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @param index The index to set the value at.
     * @param value The insights to set.
     * @return This builder for chaining.
     */
    public Builder setInsights(int index, com.google.maps.areainsights.v1.Insight value) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureInsightsIsMutable();
      insights_.set(index, value.getNumber());
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Insights to compute. Currently only INSIGHT_COUNT and
     * INSIGHT_PLACES are supported.
     * </pre>
     *
     * <code>
     * repeated .google.maps.areainsights.v1.Insight insights = 4 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @param value The insights to add.
     * @return This builder for chaining.
     */
    public Builder addInsights(com.google.maps.areainsights.v1.Insight value) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureInsightsIsMutable();
      insights_.add(value.getNumber());
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Insights to compute. Currently only INSIGHT_COUNT and
     * INSIGHT_PLACES are supported.
     * </pre>
     *
     * <code>
     * repeated .google.maps.areainsights.v1.Insight insights = 4 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @param values The insights to add.
     * @return This builder for chaining.
     */
    public Builder addAllInsights(
        java.lang.Iterable<? extends com.google.maps.areainsights.v1.Insight> values) {
      ensureInsightsIsMutable();
      for (com.google.maps.areainsights.v1.Insight value : values) {
        insights_.add(value.getNumber());
      }
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Insights to compute. Currently only INSIGHT_COUNT and
     * INSIGHT_PLACES are supported.
     * </pre>
     *
     * <code>
     * repeated .google.maps.areainsights.v1.Insight insights = 4 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearInsights() {
      insights_ = java.util.Collections.emptyList();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Insights to compute. Currently only INSIGHT_COUNT and
     * INSIGHT_PLACES are supported.
     * </pre>
     *
     * <code>
     * repeated .google.maps.areainsights.v1.Insight insights = 4 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return A list containing the enum numeric values on the wire for insights.
     */
    public java.util.List<java.lang.Integer> getInsightsValueList() {
      return java.util.Collections.unmodifiableList(insights_);
    }
    /**
     *
     *
     * <pre>
     * Required. Insights to compute. Currently only INSIGHT_COUNT and
     * INSIGHT_PLACES are supported.
     * </pre>
     *
     * <code>
     * repeated .google.maps.areainsights.v1.Insight insights = 4 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @param index The index of the value to return.
     * @return The enum numeric value on the wire of insights at the given index.
     */
    public int getInsightsValue(int index) {
      return insights_.get(index);
    }
    /**
     *
     *
     * <pre>
     * Required. Insights to compute. Currently only INSIGHT_COUNT and
     * INSIGHT_PLACES are supported.
     * </pre>
     *
     * <code>
     * repeated .google.maps.areainsights.v1.Insight insights = 4 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @param index The index to set the value at.
     * @param value The enum numeric value on the wire for insights to set.
     * @return This builder for chaining.
     */
    public Builder setInsightsValue(int index, int value) {
      ensureInsightsIsMutable();
      insights_.set(index, value);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Insights to compute. Currently only INSIGHT_COUNT and
     * INSIGHT_PLACES are supported.
     * </pre>
     *
     * <code>
     * repeated .google.maps.areainsights.v1.Insight insights = 4 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @param value The enum numeric value on the wire for insights to add.
     * @return This builder for chaining.
     */
    public Builder addInsightsValue(int value) {
      ensureInsightsIsMutable();
      insights_.add(value);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Insights to compute. Currently only INSIGHT_COUNT and
     * INSIGHT_PLACES are supported.
     * </pre>
     *
     * <code>
     * repeated .google.maps.areainsights.v1.Insight insights = 4 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @param values The enum numeric values on the wire for insights to add.
     * @return This builder for chaining.
     */
    public Builder addAllInsightsValue(java.lang.Iterable<java.lang.Integer> values) {
      ensureInsightsIsMutable();
      for (int value : values) {
        insights_.add(value);
      }
      onChanged();
      return this;
    }
    private com.google.maps.areainsights.v1.Filter filter_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.maps.areainsights.v1.Filter,
            com.google.maps.areainsights.v1.Filter.Builder,
            com.google.maps.areainsights.v1.FilterOrBuilder>
        filterBuilder_;
    /**
     *
     *
     * <pre>
     * Required. Insight filter.
     * </pre>
     *
     * <code>
     * .google.maps.areainsights.v1.Filter filter = 5 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return Whether the filter field is set.
     */
    public boolean hasFilter() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     *
     *
     * <pre>
     * Required. Insight filter.
     * </pre>
     *
     * <code>
     * .google.maps.areainsights.v1.Filter filter = 5 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return The filter.
     */
    public com.google.maps.areainsights.v1.Filter getFilter() {
      if (filterBuilder_ == null) {
        return filter_ == null
            ? com.google.maps.areainsights.v1.Filter.getDefaultInstance()
            : filter_;
      } else {
        return filterBuilder_.getMessage();
      }
    }
    /**
     *
     *
     * <pre>
     * Required. Insight filter.
     * </pre>
     *
     * <code>
     * .google.maps.areainsights.v1.Filter filter = 5 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setFilter(com.google.maps.areainsights.v1.Filter value) {
      if (filterBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        filter_ = value;
      } else {
        filterBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Insight filter.
     * </pre>
     *
     * <code>
     * .google.maps.areainsights.v1.Filter filter = 5 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setFilter(com.google.maps.areainsights.v1.Filter.Builder builderForValue) {
      if (filterBuilder_ == null) {
        filter_ = builderForValue.build();
      } else {
        filterBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Insight filter.
     * </pre>
     *
     * <code>
     * .google.maps.areainsights.v1.Filter filter = 5 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder mergeFilter(com.google.maps.areainsights.v1.Filter value) {
      if (filterBuilder_ == null) {
        if (((bitField0_ & 0x00000002) != 0)
            && filter_ != null
            && filter_ != com.google.maps.areainsights.v1.Filter.getDefaultInstance()) {
          getFilterBuilder().mergeFrom(value);
        } else {
          filter_ = value;
        }
      } else {
        filterBuilder_.mergeFrom(value);
      }
      if (filter_ != null) {
        bitField0_ |= 0x00000002;
        onChanged();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Insight filter.
     * </pre>
     *
     * <code>
     * .google.maps.areainsights.v1.Filter filter = 5 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder clearFilter() {
      bitField0_ = (bitField0_ & ~0x00000002);
      filter_ = null;
      if (filterBuilder_ != null) {
        filterBuilder_.dispose();
        filterBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Insight filter.
     * </pre>
     *
     * <code>
     * .google.maps.areainsights.v1.Filter filter = 5 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.maps.areainsights.v1.Filter.Builder getFilterBuilder() {
      bitField0_ |= 0x00000002;
      onChanged();
      return getFilterFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * Required. Insight filter.
     * </pre>
     *
     * <code>
     * .google.maps.areainsights.v1.Filter filter = 5 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.maps.areainsights.v1.FilterOrBuilder getFilterOrBuilder() {
      if (filterBuilder_ != null) {
        return filterBuilder_.getMessageOrBuilder();
      } else {
        return filter_ == null
            ? com.google.maps.areainsights.v1.Filter.getDefaultInstance()
            : filter_;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. Insight filter.
     * </pre>
     *
     * <code>
     * .google.maps.areainsights.v1.Filter filter = 5 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.maps.areainsights.v1.Filter,
            com.google.maps.areainsights.v1.Filter.Builder,
            com.google.maps.areainsights.v1.FilterOrBuilder>
        getFilterFieldBuilder() {
      if (filterBuilder_ == null) {
        filterBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.maps.areainsights.v1.Filter,
                com.google.maps.areainsights.v1.Filter.Builder,
                com.google.maps.areainsights.v1.FilterOrBuilder>(
                getFilter(), getParentForChildren(), isClean());
        filter_ = null;
      }
      return filterBuilder_;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.maps.areainsights.v1.ComputeInsightsRequest)
  }
  // @@protoc_insertion_point(class_scope:google.maps.areainsights.v1.ComputeInsightsRequest)
  private static final com.google.maps.areainsights.v1.ComputeInsightsRequest DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.maps.areainsights.v1.ComputeInsightsRequest();
  }
  public static com.google.maps.areainsights.v1.ComputeInsightsRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  private static final com.google.protobuf.Parser<ComputeInsightsRequest> PARSER =
      new com.google.protobuf.AbstractParser<ComputeInsightsRequest>() {
        @java.lang.Override
        public ComputeInsightsRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<ComputeInsightsRequest> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<ComputeInsightsRequest> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.maps.areainsights.v1.ComputeInsightsRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 37,819 | java-aiplatform/google-cloud-aiplatform/src/test/java/com/google/cloud/aiplatform/v1beta1/DeploymentResourcePoolServiceClientTest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.aiplatform.v1beta1;
import static com.google.cloud.aiplatform.v1beta1.DeploymentResourcePoolServiceClient.ListDeploymentResourcePoolsPagedResponse;
import static com.google.cloud.aiplatform.v1beta1.DeploymentResourcePoolServiceClient.ListLocationsPagedResponse;
import static com.google.cloud.aiplatform.v1beta1.DeploymentResourcePoolServiceClient.QueryDeployedModelsPagedResponse;
import com.google.api.gax.core.NoCredentialsProvider;
import com.google.api.gax.grpc.GaxGrpcProperties;
import com.google.api.gax.grpc.testing.LocalChannelProvider;
import com.google.api.gax.grpc.testing.MockGrpcService;
import com.google.api.gax.grpc.testing.MockServiceHelper;
import com.google.api.gax.rpc.ApiClientHeaderProvider;
import com.google.api.gax.rpc.InvalidArgumentException;
import com.google.api.gax.rpc.StatusCode;
import com.google.cloud.location.GetLocationRequest;
import com.google.cloud.location.ListLocationsRequest;
import com.google.cloud.location.ListLocationsResponse;
import com.google.cloud.location.Location;
import com.google.common.collect.Lists;
import com.google.iam.v1.AuditConfig;
import com.google.iam.v1.Binding;
import com.google.iam.v1.GetIamPolicyRequest;
import com.google.iam.v1.GetPolicyOptions;
import com.google.iam.v1.Policy;
import com.google.iam.v1.SetIamPolicyRequest;
import com.google.iam.v1.TestIamPermissionsRequest;
import com.google.iam.v1.TestIamPermissionsResponse;
import com.google.longrunning.Operation;
import com.google.protobuf.AbstractMessage;
import com.google.protobuf.Any;
import com.google.protobuf.ByteString;
import com.google.protobuf.Empty;
import com.google.protobuf.FieldMask;
import com.google.protobuf.Timestamp;
import io.grpc.StatusRuntimeException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.ExecutionException;
import javax.annotation.Generated;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
@Generated("by gapic-generator-java")
public class DeploymentResourcePoolServiceClientTest {
  // Mock gRPC services shared by every test in this class (started once per class).
  private static MockDeploymentResourcePoolService mockDeploymentResourcePoolService;
  private static MockIAMPolicy mockIAMPolicy;
  private static MockLocations mockLocations;
  private static MockServiceHelper mockServiceHelper;
  // Per-test state: channel into the in-process mock server and the client under test.
  private LocalChannelProvider channelProvider;
  private DeploymentResourcePoolServiceClient client;

  /** Starts one in-process gRPC server hosting all three mock services for the whole class. */
  @BeforeClass
  public static void startStaticServer() {
    mockDeploymentResourcePoolService = new MockDeploymentResourcePoolService();
    mockLocations = new MockLocations();
    mockIAMPolicy = new MockIAMPolicy();
    mockServiceHelper =
        new MockServiceHelper(
            UUID.randomUUID().toString(),
            Arrays.<MockGrpcService>asList(
                mockDeploymentResourcePoolService, mockLocations, mockIAMPolicy));
    mockServiceHelper.start();
  }

  /** Shuts down the shared mock server after the last test. */
  @AfterClass
  public static void stopServer() {
    mockServiceHelper.stop();
  }

  /** Resets mock state and builds a fresh credential-less client wired to the local channel. */
  @Before
  public void setUp() throws IOException {
    mockServiceHelper.reset();
    channelProvider = mockServiceHelper.createChannelProvider();
    DeploymentResourcePoolServiceSettings settings =
        DeploymentResourcePoolServiceSettings.newBuilder()
            .setTransportChannelProvider(channelProvider)
            .setCredentialsProvider(NoCredentialsProvider.create())
            .build();
    client = DeploymentResourcePoolServiceClient.create(settings);
  }

  /** Releases the client's channel resources after each test. */
  @After
  public void tearDown() throws Exception {
    client.close();
  }
  /** Create (LRO) with a typed LocationName parent: verifies response and captured request. */
  @Test
  public void createDeploymentResourcePoolTest() throws Exception {
    DeploymentResourcePool expectedResponse =
        DeploymentResourcePool.newBuilder()
            .setName(
                DeploymentResourcePoolName.of(
                        "[PROJECT]", "[LOCATION]", "[DEPLOYMENT_RESOURCE_POOL]")
                    .toString())
            .setDedicatedResources(DedicatedResources.newBuilder().build())
            .setEncryptionSpec(EncryptionSpec.newBuilder().build())
            .setServiceAccount("serviceAccount1079137720")
            .setDisableContainerLogging(true)
            .setCreateTime(Timestamp.newBuilder().build())
            .setSatisfiesPzs(true)
            .setSatisfiesPzi(true)
            .build();
    // Wrap the expected message in a completed long-running Operation.
    Operation resultOperation =
        Operation.newBuilder()
            .setName("createDeploymentResourcePoolTest")
            .setDone(true)
            .setResponse(Any.pack(expectedResponse))
            .build();
    mockDeploymentResourcePoolService.addResponse(resultOperation);

    LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
    DeploymentResourcePool deploymentResourcePool = DeploymentResourcePool.newBuilder().build();
    String deploymentResourcePoolId = "deploymentResourcePoolId1805697578";

    DeploymentResourcePool actualResponse =
        client
            .createDeploymentResourcePoolAsync(
                parent, deploymentResourcePool, deploymentResourcePoolId)
            .get();
    Assert.assertEquals(expectedResponse, actualResponse);

    List<AbstractMessage> actualRequests = mockDeploymentResourcePoolService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    CreateDeploymentResourcePoolRequest actualRequest =
        ((CreateDeploymentResourcePoolRequest) actualRequests.get(0));

    Assert.assertEquals(parent.toString(), actualRequest.getParent());
    Assert.assertEquals(deploymentResourcePool, actualRequest.getDeploymentResourcePool());
    Assert.assertEquals(deploymentResourcePoolId, actualRequest.getDeploymentResourcePoolId());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  /** Create (LRO) error path: INVALID_ARGUMENT from the server surfaces as ExecutionException. */
  @Test
  public void createDeploymentResourcePoolExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockDeploymentResourcePoolService.addException(exception);

    try {
      LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
      DeploymentResourcePool deploymentResourcePool = DeploymentResourcePool.newBuilder().build();
      String deploymentResourcePoolId = "deploymentResourcePoolId1805697578";
      client
          .createDeploymentResourcePoolAsync(
              parent, deploymentResourcePool, deploymentResourcePoolId)
          .get();
      Assert.fail("No exception raised");
    } catch (ExecutionException e) {
      Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass());
      InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause());
      Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode());
    }
  }

  /** Create (LRO) with a plain String parent: verifies response and captured request. */
  @Test
  public void createDeploymentResourcePoolTest2() throws Exception {
    DeploymentResourcePool expectedResponse =
        DeploymentResourcePool.newBuilder()
            .setName(
                DeploymentResourcePoolName.of(
                        "[PROJECT]", "[LOCATION]", "[DEPLOYMENT_RESOURCE_POOL]")
                    .toString())
            .setDedicatedResources(DedicatedResources.newBuilder().build())
            .setEncryptionSpec(EncryptionSpec.newBuilder().build())
            .setServiceAccount("serviceAccount1079137720")
            .setDisableContainerLogging(true)
            .setCreateTime(Timestamp.newBuilder().build())
            .setSatisfiesPzs(true)
            .setSatisfiesPzi(true)
            .build();
    Operation resultOperation =
        Operation.newBuilder()
            .setName("createDeploymentResourcePoolTest")
            .setDone(true)
            .setResponse(Any.pack(expectedResponse))
            .build();
    mockDeploymentResourcePoolService.addResponse(resultOperation);

    String parent = "parent-995424086";
    DeploymentResourcePool deploymentResourcePool = DeploymentResourcePool.newBuilder().build();
    String deploymentResourcePoolId = "deploymentResourcePoolId1805697578";

    DeploymentResourcePool actualResponse =
        client
            .createDeploymentResourcePoolAsync(
                parent, deploymentResourcePool, deploymentResourcePoolId)
            .get();
    Assert.assertEquals(expectedResponse, actualResponse);

    List<AbstractMessage> actualRequests = mockDeploymentResourcePoolService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    CreateDeploymentResourcePoolRequest actualRequest =
        ((CreateDeploymentResourcePoolRequest) actualRequests.get(0));

    Assert.assertEquals(parent, actualRequest.getParent());
    Assert.assertEquals(deploymentResourcePool, actualRequest.getDeploymentResourcePool());
    Assert.assertEquals(deploymentResourcePoolId, actualRequest.getDeploymentResourcePoolId());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  /** Create (LRO, String parent) error path. */
  @Test
  public void createDeploymentResourcePoolExceptionTest2() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockDeploymentResourcePoolService.addException(exception);

    try {
      String parent = "parent-995424086";
      DeploymentResourcePool deploymentResourcePool = DeploymentResourcePool.newBuilder().build();
      String deploymentResourcePoolId = "deploymentResourcePoolId1805697578";
      client
          .createDeploymentResourcePoolAsync(
              parent, deploymentResourcePool, deploymentResourcePoolId)
          .get();
      Assert.fail("No exception raised");
    } catch (ExecutionException e) {
      Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass());
      InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause());
      Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode());
    }
  }
  /** Unary get with a typed resource name: verifies response and captured request. */
  @Test
  public void getDeploymentResourcePoolTest() throws Exception {
    DeploymentResourcePool expectedResponse =
        DeploymentResourcePool.newBuilder()
            .setName(
                DeploymentResourcePoolName.of(
                        "[PROJECT]", "[LOCATION]", "[DEPLOYMENT_RESOURCE_POOL]")
                    .toString())
            .setDedicatedResources(DedicatedResources.newBuilder().build())
            .setEncryptionSpec(EncryptionSpec.newBuilder().build())
            .setServiceAccount("serviceAccount1079137720")
            .setDisableContainerLogging(true)
            .setCreateTime(Timestamp.newBuilder().build())
            .setSatisfiesPzs(true)
            .setSatisfiesPzi(true)
            .build();
    mockDeploymentResourcePoolService.addResponse(expectedResponse);

    DeploymentResourcePoolName name =
        DeploymentResourcePoolName.of("[PROJECT]", "[LOCATION]", "[DEPLOYMENT_RESOURCE_POOL]");

    DeploymentResourcePool actualResponse = client.getDeploymentResourcePool(name);
    Assert.assertEquals(expectedResponse, actualResponse);

    List<AbstractMessage> actualRequests = mockDeploymentResourcePoolService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    GetDeploymentResourcePoolRequest actualRequest =
        ((GetDeploymentResourcePoolRequest) actualRequests.get(0));

    Assert.assertEquals(name.toString(), actualRequest.getName());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  /** Unary get error path: server INVALID_ARGUMENT maps to InvalidArgumentException. */
  @Test
  public void getDeploymentResourcePoolExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockDeploymentResourcePoolService.addException(exception);

    try {
      DeploymentResourcePoolName name =
          DeploymentResourcePoolName.of("[PROJECT]", "[LOCATION]", "[DEPLOYMENT_RESOURCE_POOL]");
      client.getDeploymentResourcePool(name);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  /** Unary get with a plain String name. */
  @Test
  public void getDeploymentResourcePoolTest2() throws Exception {
    DeploymentResourcePool expectedResponse =
        DeploymentResourcePool.newBuilder()
            .setName(
                DeploymentResourcePoolName.of(
                        "[PROJECT]", "[LOCATION]", "[DEPLOYMENT_RESOURCE_POOL]")
                    .toString())
            .setDedicatedResources(DedicatedResources.newBuilder().build())
            .setEncryptionSpec(EncryptionSpec.newBuilder().build())
            .setServiceAccount("serviceAccount1079137720")
            .setDisableContainerLogging(true)
            .setCreateTime(Timestamp.newBuilder().build())
            .setSatisfiesPzs(true)
            .setSatisfiesPzi(true)
            .build();
    mockDeploymentResourcePoolService.addResponse(expectedResponse);

    String name = "name3373707";

    DeploymentResourcePool actualResponse = client.getDeploymentResourcePool(name);
    Assert.assertEquals(expectedResponse, actualResponse);

    List<AbstractMessage> actualRequests = mockDeploymentResourcePoolService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    GetDeploymentResourcePoolRequest actualRequest =
        ((GetDeploymentResourcePoolRequest) actualRequests.get(0));

    Assert.assertEquals(name, actualRequest.getName());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  /** Unary get (String name) error path. */
  @Test
  public void getDeploymentResourcePoolExceptionTest2() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockDeploymentResourcePoolService.addException(exception);

    try {
      String name = "name3373707";
      client.getDeploymentResourcePool(name);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
  /** Paged list with a typed ProjectName parent: one element, empty next-page token. */
  @Test
  public void listDeploymentResourcePoolsTest() throws Exception {
    DeploymentResourcePool responsesElement = DeploymentResourcePool.newBuilder().build();
    ListDeploymentResourcePoolsResponse expectedResponse =
        ListDeploymentResourcePoolsResponse.newBuilder()
            .setNextPageToken("")
            .addAllDeploymentResourcePools(Arrays.asList(responsesElement))
            .build();
    mockDeploymentResourcePoolService.addResponse(expectedResponse);

    ProjectName parent = ProjectName.of("[PROJECT]");

    ListDeploymentResourcePoolsPagedResponse pagedListResponse =
        client.listDeploymentResourcePools(parent);

    List<DeploymentResourcePool> resources = Lists.newArrayList(pagedListResponse.iterateAll());

    Assert.assertEquals(1, resources.size());
    Assert.assertEquals(expectedResponse.getDeploymentResourcePoolsList().get(0), resources.get(0));

    List<AbstractMessage> actualRequests = mockDeploymentResourcePoolService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    ListDeploymentResourcePoolsRequest actualRequest =
        ((ListDeploymentResourcePoolsRequest) actualRequests.get(0));

    Assert.assertEquals(parent.toString(), actualRequest.getParent());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  /** Paged list error path. */
  @Test
  public void listDeploymentResourcePoolsExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockDeploymentResourcePoolService.addException(exception);

    try {
      ProjectName parent = ProjectName.of("[PROJECT]");
      client.listDeploymentResourcePools(parent);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  /** Paged list with a plain String parent. */
  @Test
  public void listDeploymentResourcePoolsTest2() throws Exception {
    DeploymentResourcePool responsesElement = DeploymentResourcePool.newBuilder().build();
    ListDeploymentResourcePoolsResponse expectedResponse =
        ListDeploymentResourcePoolsResponse.newBuilder()
            .setNextPageToken("")
            .addAllDeploymentResourcePools(Arrays.asList(responsesElement))
            .build();
    mockDeploymentResourcePoolService.addResponse(expectedResponse);

    String parent = "parent-995424086";

    ListDeploymentResourcePoolsPagedResponse pagedListResponse =
        client.listDeploymentResourcePools(parent);

    List<DeploymentResourcePool> resources = Lists.newArrayList(pagedListResponse.iterateAll());

    Assert.assertEquals(1, resources.size());
    Assert.assertEquals(expectedResponse.getDeploymentResourcePoolsList().get(0), resources.get(0));

    List<AbstractMessage> actualRequests = mockDeploymentResourcePoolService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    ListDeploymentResourcePoolsRequest actualRequest =
        ((ListDeploymentResourcePoolsRequest) actualRequests.get(0));

    Assert.assertEquals(parent, actualRequest.getParent());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  /** Paged list (String parent) error path. */
  @Test
  public void listDeploymentResourcePoolsExceptionTest2() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockDeploymentResourcePoolService.addException(exception);

    try {
      String parent = "parent-995424086";
      client.listDeploymentResourcePools(parent);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
  /** Update (LRO) with pool + field mask: verifies response and captured request. */
  @Test
  public void updateDeploymentResourcePoolTest() throws Exception {
    DeploymentResourcePool expectedResponse =
        DeploymentResourcePool.newBuilder()
            .setName(
                DeploymentResourcePoolName.of(
                        "[PROJECT]", "[LOCATION]", "[DEPLOYMENT_RESOURCE_POOL]")
                    .toString())
            .setDedicatedResources(DedicatedResources.newBuilder().build())
            .setEncryptionSpec(EncryptionSpec.newBuilder().build())
            .setServiceAccount("serviceAccount1079137720")
            .setDisableContainerLogging(true)
            .setCreateTime(Timestamp.newBuilder().build())
            .setSatisfiesPzs(true)
            .setSatisfiesPzi(true)
            .build();
    Operation resultOperation =
        Operation.newBuilder()
            .setName("updateDeploymentResourcePoolTest")
            .setDone(true)
            .setResponse(Any.pack(expectedResponse))
            .build();
    mockDeploymentResourcePoolService.addResponse(resultOperation);

    DeploymentResourcePool deploymentResourcePool = DeploymentResourcePool.newBuilder().build();
    FieldMask updateMask = FieldMask.newBuilder().build();

    DeploymentResourcePool actualResponse =
        client.updateDeploymentResourcePoolAsync(deploymentResourcePool, updateMask).get();
    Assert.assertEquals(expectedResponse, actualResponse);

    List<AbstractMessage> actualRequests = mockDeploymentResourcePoolService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    UpdateDeploymentResourcePoolRequest actualRequest =
        ((UpdateDeploymentResourcePoolRequest) actualRequests.get(0));

    Assert.assertEquals(deploymentResourcePool, actualRequest.getDeploymentResourcePool());
    Assert.assertEquals(updateMask, actualRequest.getUpdateMask());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  /** Update (LRO) error path: failure surfaces as ExecutionException wrapping the API error. */
  @Test
  public void updateDeploymentResourcePoolExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockDeploymentResourcePoolService.addException(exception);

    try {
      DeploymentResourcePool deploymentResourcePool = DeploymentResourcePool.newBuilder().build();
      FieldMask updateMask = FieldMask.newBuilder().build();
      client.updateDeploymentResourcePoolAsync(deploymentResourcePool, updateMask).get();
      Assert.fail("No exception raised");
    } catch (ExecutionException e) {
      Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass());
      InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause());
      Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode());
    }
  }
  /** Delete (LRO, Empty response) with a typed resource name. */
  @Test
  public void deleteDeploymentResourcePoolTest() throws Exception {
    Empty expectedResponse = Empty.newBuilder().build();
    Operation resultOperation =
        Operation.newBuilder()
            .setName("deleteDeploymentResourcePoolTest")
            .setDone(true)
            .setResponse(Any.pack(expectedResponse))
            .build();
    mockDeploymentResourcePoolService.addResponse(resultOperation);

    DeploymentResourcePoolName name =
        DeploymentResourcePoolName.of("[PROJECT]", "[LOCATION]", "[DEPLOYMENT_RESOURCE_POOL]");

    client.deleteDeploymentResourcePoolAsync(name).get();

    List<AbstractMessage> actualRequests = mockDeploymentResourcePoolService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    DeleteDeploymentResourcePoolRequest actualRequest =
        ((DeleteDeploymentResourcePoolRequest) actualRequests.get(0));

    Assert.assertEquals(name.toString(), actualRequest.getName());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  /** Delete (LRO) error path. */
  @Test
  public void deleteDeploymentResourcePoolExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockDeploymentResourcePoolService.addException(exception);

    try {
      DeploymentResourcePoolName name =
          DeploymentResourcePoolName.of("[PROJECT]", "[LOCATION]", "[DEPLOYMENT_RESOURCE_POOL]");
      client.deleteDeploymentResourcePoolAsync(name).get();
      Assert.fail("No exception raised");
    } catch (ExecutionException e) {
      Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass());
      InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause());
      Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode());
    }
  }

  /** Delete (LRO) with a plain String name. */
  @Test
  public void deleteDeploymentResourcePoolTest2() throws Exception {
    Empty expectedResponse = Empty.newBuilder().build();
    Operation resultOperation =
        Operation.newBuilder()
            .setName("deleteDeploymentResourcePoolTest")
            .setDone(true)
            .setResponse(Any.pack(expectedResponse))
            .build();
    mockDeploymentResourcePoolService.addResponse(resultOperation);

    String name = "name3373707";

    client.deleteDeploymentResourcePoolAsync(name).get();

    List<AbstractMessage> actualRequests = mockDeploymentResourcePoolService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    DeleteDeploymentResourcePoolRequest actualRequest =
        ((DeleteDeploymentResourcePoolRequest) actualRequests.get(0));

    Assert.assertEquals(name, actualRequest.getName());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  /** Delete (LRO, String name) error path. */
  @Test
  public void deleteDeploymentResourcePoolExceptionTest2() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockDeploymentResourcePoolService.addException(exception);

    try {
      String name = "name3373707";
      client.deleteDeploymentResourcePoolAsync(name).get();
      Assert.fail("No exception raised");
    } catch (ExecutionException e) {
      Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass());
      InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause());
      Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode());
    }
  }
  /** Paged queryDeployedModels: one element, empty next-page token. */
  @Test
  public void queryDeployedModelsTest() throws Exception {
    DeployedModel responsesElement = DeployedModel.newBuilder().build();
    QueryDeployedModelsResponse expectedResponse =
        QueryDeployedModelsResponse.newBuilder()
            .setNextPageToken("")
            .addAllDeployedModels(Arrays.asList(responsesElement))
            .build();
    mockDeploymentResourcePoolService.addResponse(expectedResponse);

    String deploymentResourcePool = "deploymentResourcePool-1928845137";

    QueryDeployedModelsPagedResponse pagedListResponse =
        client.queryDeployedModels(deploymentResourcePool);

    List<DeployedModel> resources = Lists.newArrayList(pagedListResponse.iterateAll());

    Assert.assertEquals(1, resources.size());
    Assert.assertEquals(expectedResponse.getDeployedModelsList().get(0), resources.get(0));

    List<AbstractMessage> actualRequests = mockDeploymentResourcePoolService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    QueryDeployedModelsRequest actualRequest = ((QueryDeployedModelsRequest) actualRequests.get(0));

    Assert.assertEquals(deploymentResourcePool, actualRequest.getDeploymentResourcePool());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  /** queryDeployedModels error path. */
  @Test
  public void queryDeployedModelsExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockDeploymentResourcePoolService.addException(exception);

    try {
      String deploymentResourcePool = "deploymentResourcePool-1928845137";
      client.queryDeployedModels(deploymentResourcePool);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
  /** Locations mixin: paged listLocations round-trip against the mock Locations service. */
  @Test
  public void listLocationsTest() throws Exception {
    Location responsesElement = Location.newBuilder().build();
    ListLocationsResponse expectedResponse =
        ListLocationsResponse.newBuilder()
            .setNextPageToken("")
            .addAllLocations(Arrays.asList(responsesElement))
            .build();
    mockLocations.addResponse(expectedResponse);

    ListLocationsRequest request =
        ListLocationsRequest.newBuilder()
            .setName("name3373707")
            .setFilter("filter-1274492040")
            .setPageSize(883849137)
            .setPageToken("pageToken873572522")
            .build();

    ListLocationsPagedResponse pagedListResponse = client.listLocations(request);

    List<Location> resources = Lists.newArrayList(pagedListResponse.iterateAll());

    Assert.assertEquals(1, resources.size());
    Assert.assertEquals(expectedResponse.getLocationsList().get(0), resources.get(0));

    List<AbstractMessage> actualRequests = mockLocations.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    ListLocationsRequest actualRequest = ((ListLocationsRequest) actualRequests.get(0));

    Assert.assertEquals(request.getName(), actualRequest.getName());
    Assert.assertEquals(request.getFilter(), actualRequest.getFilter());
    Assert.assertEquals(request.getPageSize(), actualRequest.getPageSize());
    Assert.assertEquals(request.getPageToken(), actualRequest.getPageToken());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  /** listLocations error path. */
  @Test
  public void listLocationsExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockLocations.addException(exception);

    try {
      ListLocationsRequest request =
          ListLocationsRequest.newBuilder()
              .setName("name3373707")
              .setFilter("filter-1274492040")
              .setPageSize(883849137)
              .setPageToken("pageToken873572522")
              .build();
      client.listLocations(request);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  /** Locations mixin: unary getLocation round-trip. */
  @Test
  public void getLocationTest() throws Exception {
    Location expectedResponse =
        Location.newBuilder()
            .setName("name3373707")
            .setLocationId("locationId1541836720")
            .setDisplayName("displayName1714148973")
            .putAllLabels(new HashMap<String, String>())
            .setMetadata(Any.newBuilder().build())
            .build();
    mockLocations.addResponse(expectedResponse);

    GetLocationRequest request = GetLocationRequest.newBuilder().setName("name3373707").build();

    Location actualResponse = client.getLocation(request);
    Assert.assertEquals(expectedResponse, actualResponse);

    List<AbstractMessage> actualRequests = mockLocations.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    GetLocationRequest actualRequest = ((GetLocationRequest) actualRequests.get(0));

    Assert.assertEquals(request.getName(), actualRequest.getName());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  /** getLocation error path. */
  @Test
  public void getLocationExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockLocations.addException(exception);

    try {
      GetLocationRequest request = GetLocationRequest.newBuilder().setName("name3373707").build();
      client.getLocation(request);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
@Test
public void setIamPolicyTest() throws Exception {
Policy expectedResponse =
Policy.newBuilder()
.setVersion(351608024)
.addAllBindings(new ArrayList<Binding>())
.addAllAuditConfigs(new ArrayList<AuditConfig>())
.setEtag(ByteString.EMPTY)
.build();
mockIAMPolicy.addResponse(expectedResponse);
SetIamPolicyRequest request =
SetIamPolicyRequest.newBuilder()
.setResource(
EndpointName.ofProjectLocationEndpointName("[PROJECT]", "[LOCATION]", "[ENDPOINT]")
.toString())
.setPolicy(Policy.newBuilder().build())
.setUpdateMask(FieldMask.newBuilder().build())
.build();
Policy actualResponse = client.setIamPolicy(request);
Assert.assertEquals(expectedResponse, actualResponse);
List<AbstractMessage> actualRequests = mockIAMPolicy.getRequests();
Assert.assertEquals(1, actualRequests.size());
SetIamPolicyRequest actualRequest = ((SetIamPolicyRequest) actualRequests.get(0));
Assert.assertEquals(request.getResource(), actualRequest.getResource());
Assert.assertEquals(request.getPolicy(), actualRequest.getPolicy());
Assert.assertEquals(request.getUpdateMask(), actualRequest.getUpdateMask());
Assert.assertTrue(
channelProvider.isHeaderSent(
ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
@Test
public void setIamPolicyExceptionTest() throws Exception {
StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
mockIAMPolicy.addException(exception);
try {
SetIamPolicyRequest request =
SetIamPolicyRequest.newBuilder()
.setResource(
EndpointName.ofProjectLocationEndpointName(
"[PROJECT]", "[LOCATION]", "[ENDPOINT]")
.toString())
.setPolicy(Policy.newBuilder().build())
.setUpdateMask(FieldMask.newBuilder().build())
.build();
client.setIamPolicy(request);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception.
}
}
@Test
public void getIamPolicyTest() throws Exception {
Policy expectedResponse =
Policy.newBuilder()
.setVersion(351608024)
.addAllBindings(new ArrayList<Binding>())
.addAllAuditConfigs(new ArrayList<AuditConfig>())
.setEtag(ByteString.EMPTY)
.build();
mockIAMPolicy.addResponse(expectedResponse);
GetIamPolicyRequest request =
GetIamPolicyRequest.newBuilder()
.setResource(
EndpointName.ofProjectLocationEndpointName("[PROJECT]", "[LOCATION]", "[ENDPOINT]")
.toString())
.setOptions(GetPolicyOptions.newBuilder().build())
.build();
Policy actualResponse = client.getIamPolicy(request);
Assert.assertEquals(expectedResponse, actualResponse);
List<AbstractMessage> actualRequests = mockIAMPolicy.getRequests();
Assert.assertEquals(1, actualRequests.size());
GetIamPolicyRequest actualRequest = ((GetIamPolicyRequest) actualRequests.get(0));
Assert.assertEquals(request.getResource(), actualRequest.getResource());
Assert.assertEquals(request.getOptions(), actualRequest.getOptions());
Assert.assertTrue(
channelProvider.isHeaderSent(
ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
@Test
public void getIamPolicyExceptionTest() throws Exception {
  // Prime the mock service to fail the next call with INVALID_ARGUMENT.
  mockIAMPolicy.addException(new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT));

  // Building the request cannot raise the expected exception, so it lives outside the try.
  GetIamPolicyRequest request =
      GetIamPolicyRequest.newBuilder()
          .setResource(
              EndpointName.ofProjectLocationEndpointName("[PROJECT]", "[LOCATION]", "[ENDPOINT]")
                  .toString())
          .setOptions(GetPolicyOptions.newBuilder().build())
          .build();
  try {
    client.getIamPolicy(request);
    Assert.fail("No exception raised");
  } catch (InvalidArgumentException expected) {
    // Expected: the gRPC INVALID_ARGUMENT status surfaces as InvalidArgumentException.
  }
}
@Test
public void testIamPermissionsTest() throws Exception {
  // Canned (empty) permissions response the mock service will return.
  TestIamPermissionsResponse cannedResponse =
      TestIamPermissionsResponse.newBuilder().addAllPermissions(new ArrayList<String>()).build();
  mockIAMPolicy.addResponse(cannedResponse);

  TestIamPermissionsRequest request =
      TestIamPermissionsRequest.newBuilder()
          .setResource(
              EndpointName.ofProjectLocationEndpointName("[PROJECT]", "[LOCATION]", "[ENDPOINT]")
                  .toString())
          .addAllPermissions(new ArrayList<String>())
          .build();

  Assert.assertEquals(cannedResponse, client.testIamPermissions(request));

  // Exactly one RPC must have reached the mock, carrying the fields we sent.
  List<AbstractMessage> sentRequests = mockIAMPolicy.getRequests();
  Assert.assertEquals(1, sentRequests.size());
  TestIamPermissionsRequest sent = (TestIamPermissionsRequest) sentRequests.get(0);
  Assert.assertEquals(request.getResource(), sent.getResource());
  Assert.assertEquals(request.getPermissionsList(), sent.getPermissionsList());
  // The standard API client header must be attached to the outgoing channel.
  Assert.assertTrue(
      channelProvider.isHeaderSent(
          ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
          GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
@Test
public void testIamPermissionsExceptionTest() throws Exception {
  // Prime the mock service to fail the next call with INVALID_ARGUMENT.
  mockIAMPolicy.addException(new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT));

  // Building the request cannot raise the expected exception, so it lives outside the try.
  TestIamPermissionsRequest request =
      TestIamPermissionsRequest.newBuilder()
          .setResource(
              EndpointName.ofProjectLocationEndpointName("[PROJECT]", "[LOCATION]", "[ENDPOINT]")
                  .toString())
          .addAllPermissions(new ArrayList<String>())
          .build();
  try {
    client.testIamPermissions(request);
    Assert.fail("No exception raised");
  } catch (InvalidArgumentException expected) {
    // Expected: the gRPC INVALID_ARGUMENT status surfaces as InvalidArgumentException.
  }
}
}
|
apache/grails-core | 37,700 | grails-gradle/model/src/main/groovy/org/grails/io/support/GrailsResourceUtils.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.grails.io.support;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.net.URLConnection;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import groovy.lang.Closure;
import groovy.util.ConfigObject;
import org.codehaus.groovy.runtime.DefaultGroovyMethods;
import grails.util.BuildSettings;
/**
* Utility methods for resource handling / figuring out class names.
*
* @author Graeme Rocher
* @author Juergen Hoeller
* @since 2.0
*/
public class GrailsResourceUtils {
// -------------------------------------------------------------------------
// Path and URL constants used to recognise Grails project layouts.
// -------------------------------------------------------------------------

/** File extension of compiled Java/Groovy class files. */
public static final String CLASS_EXTENSION = ".class";

/** Windows path separator, normalised to "/" by cleanPath(). */
private static final String WINDOWS_FOLDER_SEPARATOR = "\\";

/** Parent-directory path element. */
private static final String TOP_PATH = "..";

/** Current-directory path element. */
private static final String CURRENT_PATH = ".";

/** Canonical folder separator used for normalised paths. */
private static final String FOLDER_SEPARATOR = "/";

/** Separator between a jar file URL and the entry path inside the jar. */
public static final String JAR_URL_SEPARATOR = "!/";

/** Pseudo URL prefix for loading from the class path: "classpath:" */
public static final String CLASSPATH_URL_PREFIX = "classpath:";

/** URL prefix for loading from the file system: "file:" */
public static final String FILE_URL_PREFIX = "file:";

/** URL protocol for a file in the file system: "file" */
public static final String URL_PROTOCOL_FILE = "file";

/** URL protocol for an entry from a jar file: "jar" */
public static final String URL_PROTOCOL_JAR = "jar";

/** URL protocol for an entry from a zip file: "zip" */
public static final String URL_PROTOCOL_ZIP = "zip";

/** URL protocol for an entry from a JBoss jar file: "vfszip" */
public static final String URL_PROTOCOL_VFSZIP = "vfszip";

/** URL protocol for a JBoss VFS resource: "vfs" */
public static final String URL_PROTOCOL_VFS = "vfs";

/** URL protocol for an entry from a WebSphere jar file: "wsjar" */
public static final String URL_PROTOCOL_WSJAR = "wsjar";

/** URL protocol for an entry from an OC4J jar file: "code-source" */
public static final String URL_PROTOCOL_CODE_SOURCE = "code-source";

/**
 * The relative path to the WEB-INF directory
 */
public static final String WEB_INF = "/WEB-INF";

/**
 * The name of the Grails application directory
 */
public static final String GRAILS_APP_DIR = "grails-app";

/**
 * The name of the Web app dir within Grails
 */
public static final String WEB_APP_DIR = "web-app";

/**
 * The path to the views directory
 */
public static final String VIEWS_DIR_PATH = GRAILS_APP_DIR + "/views/";

/**
 * The path to the views directory without a trailing slash
 */
public static final String VIEWS_DIR_PATH_NO_SLASH = GRAILS_APP_DIR + "/views";

/** The path to the domain-class directory, with trailing slash. */
public static final String DOMAIN_DIR_PATH = GRAILS_APP_DIR + "/domain/";

/** Regex character class matching either file separator ('/' or '\'). */
public static final String REGEX_FILE_SEPARATOR = "[\\\\/]"; // backslashes need escaping in regexes

/*
   Domain path is always matched against the normalized File representation of an URL and
   can therefore work with slashes as separators.
 */
// Group 1 = class-relative path under grails-app/domain, group 2 = source extension.
public static Pattern DOMAIN_PATH_PATTERN = Pattern.compile(".+" + REGEX_FILE_SEPARATOR + GRAILS_APP_DIR + REGEX_FILE_SEPARATOR + "domain" + REGEX_FILE_SEPARATOR + "(.+)\\.(groovy|java)");

/*
   This pattern will match any resource within a given directory inside grails-app
 */
// Group 1 = artefact directory name, group 2 = relative source path, group 3 = extension.
public static Pattern RESOURCE_PATH_PATTERN = Pattern.compile(".+?" + REGEX_FILE_SEPARATOR + GRAILS_APP_DIR + REGEX_FILE_SEPARATOR + "(.+?)" + REGEX_FILE_SEPARATOR + "(.+?\\.(groovy|java))");

// Single capturing group: the script file name under grails-app/conf/spring.
public static Pattern SPRING_SCRIPTS_PATH_PATTERN = Pattern.compile(".+?" + REGEX_FILE_SEPARATOR + GRAILS_APP_DIR + REGEX_FILE_SEPARATOR + "conf" + REGEX_FILE_SEPARATOR + "spring" + REGEX_FILE_SEPARATOR + "(.+?\\.groovy)");

// Patterns used by getPathFromRoot(); checked in this order.
// NOTE(review): the two patterns capture the relative path in different group
// positions (1 vs 2) -- see getPathFromRoot().
public static Pattern[] COMPILER_ROOT_PATTERNS = {
    SPRING_SCRIPTS_PATH_PATTERN,
    RESOURCE_PATH_PATTERN
};
/*
   Resources are resolved against the platform specific path and must therefore obey the
   specific File.separator.
 */
// One pattern per recognised source root; all are initialised in the static block
// below, each capturing the class-relative source path as group 1.
public static final Pattern GRAILS_RESOURCE_PATTERN_FIRST_MATCH;
public static final Pattern GRAILS_RESOURCE_PATTERN_SECOND_MATCH;
public static final Pattern GRAILS_RESOURCE_PATTERN_THIRD_MATCH;
public static final Pattern GRAILS_RESOURCE_PATTERN_FOURTH_MATCH;
public static final Pattern GRAILS_RESOURCE_PATTERN_FIFTH_MATCH;
public static final Pattern GRAILS_RESOURCE_PATTERN_SIXTH_MATCH;
public static final Pattern GRAILS_RESOURCE_PATTERN_SEVENTH_MATCH;
public static final Pattern GRAILS_RESOURCE_PATTERN_EIGHTH_MATCH;
public static final Pattern GRAILS_RESOURCE_PATTERN_NINTH_MATCH;
public static final Pattern GRAILS_RESOURCE_PATTERN_TENTH_MATCH;
public static final Pattern GRAILS_RESOURCE_PATTERN_TENTH_MATCH;
public static final Pattern GRAILS_RESOURCE_PATTERN_ELEVENTH_MATCH;

static {
    // First batch: patterns built with the separator character class, matching
    // both '/' and '\' (platform-specific paths).
    String fs = REGEX_FILE_SEPARATOR;
    GRAILS_RESOURCE_PATTERN_FIRST_MATCH = Pattern.compile(createGrailsResourcePattern(fs, GRAILS_APP_DIR + fs + "conf" + fs + "spring"));
    GRAILS_RESOURCE_PATTERN_THIRD_MATCH = Pattern.compile(createGrailsResourcePattern(fs, GRAILS_APP_DIR + fs + "[\\w-]+"));
    GRAILS_RESOURCE_PATTERN_SEVENTH_MATCH = Pattern.compile(createGrailsResourcePattern(fs, "src" + fs + "main" + fs + "java"));
    GRAILS_RESOURCE_PATTERN_EIGHTH_MATCH = Pattern.compile(createGrailsResourcePattern(fs, "src" + fs + "main" + fs + "groovy"));
    GRAILS_RESOURCE_PATTERN_NINTH_MATCH = Pattern.compile(createGrailsResourcePattern(fs, "src" + fs + "test" + fs + "groovy"));
    GRAILS_RESOURCE_PATTERN_TENTH_MATCH = Pattern.compile(createGrailsResourcePattern(fs, "src" + fs + "test" + fs + "java"));
    GRAILS_RESOURCE_PATTERN_ELEVENTH_MATCH = Pattern.compile(createGrailsResourcePattern(fs, "src" + fs + "test" + fs + "functional"));
    GRAILS_RESOURCE_PATTERN_FIFTH_MATCH = Pattern.compile(createGrailsResourcePattern(fs, "grails-tests"));
    // Second batch: the same roots with a plain "/" separator (URL-style paths).
    fs = "/";
    GRAILS_RESOURCE_PATTERN_SECOND_MATCH = Pattern.compile(createGrailsResourcePattern(fs, GRAILS_APP_DIR + fs + "conf" + fs + "spring"));
    GRAILS_RESOURCE_PATTERN_FOURTH_MATCH = Pattern.compile(createGrailsResourcePattern(fs, GRAILS_APP_DIR + fs + "[\\w-]+"));
    GRAILS_RESOURCE_PATTERN_SIXTH_MATCH = Pattern.compile(createGrailsResourcePattern(fs, "grails-tests"));
}

// All recognised project-source roots (used by getClassName and isProjectSource).
// NOTE(review): SECOND_MATCH is deliberately(?) absent here -- confirm.
public static final Pattern[] patterns = new Pattern[]{
    GRAILS_RESOURCE_PATTERN_FIRST_MATCH,
    GRAILS_RESOURCE_PATTERN_THIRD_MATCH,
    GRAILS_RESOURCE_PATTERN_SEVENTH_MATCH,
    GRAILS_RESOURCE_PATTERN_EIGHTH_MATCH,
    GRAILS_RESOURCE_PATTERN_FOURTH_MATCH,
    GRAILS_RESOURCE_PATTERN_FIFTH_MATCH,
    GRAILS_RESOURCE_PATTERN_SIXTH_MATCH,
    GRAILS_RESOURCE_PATTERN_NINTH_MATCH,
    GRAILS_RESOURCE_PATTERN_TENTH_MATCH,
    GRAILS_RESOURCE_PATTERN_ELEVENTH_MATCH
};

// Subset of roots that live under grails-app or grails-tests (used by isGrailsPath).
public static final Pattern[] grailsAppResourcePatterns = new Pattern[]{
    GRAILS_RESOURCE_PATTERN_FIRST_MATCH,
    GRAILS_RESOURCE_PATTERN_THIRD_MATCH,
    GRAILS_RESOURCE_PATTERN_FOURTH_MATCH,
    GRAILS_RESOURCE_PATTERN_FIFTH_MATCH,
    GRAILS_RESOURCE_PATTERN_SIXTH_MATCH,
    GRAILS_RESOURCE_PATTERN_ELEVENTH_MATCH
};

// Bounded (100-entry, insertion-order eviction) memoisation cache for isGrailsPath.
// NOTE(review): not synchronized -- assumed to be accessed single-threaded; confirm.
private static Map<String, Boolean> KNOWN_PATHS = new LinkedHashMap<>() {
    @Override
    protected boolean removeEldestEntry(Map.Entry eldest) {
        return this.size() > 100;
    }
};

// Bounded cache mapping a URL file path to "is a domain class"; the Groovy
// withDefault closure computes missing entries via DOMAIN_PATH_PATTERN.
private static Map<String, Boolean> KNOWN_DOMAIN_CLASSES = DefaultGroovyMethods.withDefault(new LinkedHashMap<String, Boolean>() {
    @Override
    protected boolean removeEldestEntry(Map.Entry<String, Boolean> eldest) {
        return this.size() > 100;
    }
}, new Closure(GrailsResourceUtils.class) {
    @Override
    public Object call(Object... args) {
        String path = args[0].toString();
        return DOMAIN_PATH_PATTERN.matcher(path).find();
    }
});
/**
 * Builds the regex used to recognise a Grails source file beneath {@code base},
 * capturing the class-relative path (group 1) and the extension (group 2).
 */
private static String createGrailsResourcePattern(String separator, String base) {
    StringBuilder pattern = new StringBuilder(".+");
    pattern.append(separator).append(base).append(separator);
    pattern.append("(.+)\\.(groovy|java)$");
    return pattern.toString();
}
/**
 * Checks whether the file referenced by the given url is a domain class
 *
 * @param url The URL instance (may be {@code null})
 * @return true if it is a domain class
 */
public static boolean isDomainClass(URL url) {
    // The withDefault cache never returns null, so unboxing here is safe.
    return url != null && KNOWN_DOMAIN_CLASSES.get(url.getFile());
}
/**
 * Extract the filename from the given path,
 * e.g. "mypath/myfile.txt" -&gt; "myfile.txt".
 *
 * @param path the file path (may be <code>null</code>)
 * @return the extracted filename, or <code>null</code> if none
 */
public static String getFilename(String path) {
    if (path == null) {
        return null;
    }
    int lastSeparator = path.lastIndexOf(FOLDER_SEPARATOR);
    if (lastSeparator == -1) {
        // No separator: the whole path is the filename.
        return path;
    }
    return path.substring(lastSeparator + 1);
}
/**
 * Given an input class object, return a string which consists of the
 * class's package name as a pathname, i.e., all dots ('.') are replaced by
 * slashes ('/'). Neither a leading nor trailing slash is added. The result
 * could be concatenated with a slash and the name of a resource and fed
 * directly to <code>ClassLoader.getResource()</code>. For it to be fed to
 * <code>Class.getResource</code> instead, a leading slash would also have
 * to be prepended to the returned value.
 *
 * @param clazz the input class. A <code>null</code> value or the default
 * (empty) package will result in an empty string ("") being returned.
 * @return a path which represents the package name
 * @see ClassLoader#getResource
 * @see Class#getResource
 */
public static String classPackageAsResourcePath(Class<?> clazz) {
    if (clazz == null) {
        return "";
    }
    String fullName = clazz.getName();
    int lastDot = fullName.lastIndexOf('.');
    // Default package (no dot) yields the empty string.
    return (lastDot == -1) ? "" : fullName.substring(0, lastDot).replace('.', '/');
}
/**
 * Enables connection caching only for JNLP (Java Web Start) connections,
 * disabling it for everything else.
 */
public static void useCachesIfNecessary(URLConnection con) {
    boolean isJnlpConnection = con.getClass().getName().startsWith("JNLP");
    con.setUseCaches(isJnlpConnection);
}
/**
 * Gets the class name of the specified Grails resource
 *
 * @param resource The Spring Resource
 * @return The class name or null if the resource is not a Grails class
 */
public static String getClassName(Resource resource) {
    try {
        String absolutePath = resource.getFile().getAbsolutePath();
        return getClassName(absolutePath);
    }
    catch (IOException e) {
        // Resource is not backed by a file, so it cannot name a Grails class.
        return null;
    }
}
/**
 * Returns the class name for a Grails resource.
 *
 * @param path The path to check
 * @return The class name or null if it doesn't exist
 */
public static String getClassName(String path) {
    for (Pattern candidate : patterns) {
        Matcher matcher = candidate.matcher(path);
        if (!matcher.find()) {
            continue;
        }
        // Group 1 is the class-relative path; convert separators to dots.
        return matcher.group(1).replaceAll("[/\\\\]", ".");
    }
    return null;
}
/**
 * Returns the class name for a compiled class file.
 *
 * <p>Both '/' and '\' separators are converted to '.', the {@code .class}
 * suffix is removed and, when given, the {@code rootDir} prefix is stripped.
 *
 * @param rootDir the root prefix to strip, already expressed in the same
 *                dot-separated form this method produces (or {@code null} for none)
 * @param path    the path of the {@code .class} file
 * @return The class name or null if it doesn't exist
 */
public static String getClassNameForClassFile(String rootDir, String path) {
    path = path.replace('/', '.');
    path = path.replace('\\', '.');
    // Only strip the suffix when it is actually present. The previous code
    // unconditionally chopped the last 6 characters, corrupting (or throwing
    // StringIndexOutOfBoundsException for) paths without a ".class" suffix.
    if (path.endsWith(CLASS_EXTENSION)) {
        path = path.substring(0, path.length() - CLASS_EXTENSION.length());
    }
    if (rootDir != null) {
        path = path.substring(rootDir.length());
    }
    return path;
}
/**
 * Resolve the given resource URL to a <code>java.io.File</code>,
 * i.e. to a file in the file system.
 *
 * @param resourceUrl the resource URL to resolve
 * @param description a description of the original resource that
 * the URL was created for (for example, a class path location)
 * @return a corresponding File object
 * @throws java.io.FileNotFoundException if the URL cannot be resolved to
 * a file in the file system
 */
public static File getFile(URL resourceUrl, String description) throws FileNotFoundException {
    String protocol = resourceUrl.getProtocol();
    if (!URL_PROTOCOL_FILE.equals(protocol)) {
        throw new FileNotFoundException(
                description + " cannot be resolved to absolute file path " +
                "because it does not reside in the file system: " + resourceUrl);
    }
    try {
        return new File(toURI(resourceUrl).getSchemeSpecificPart());
    }
    catch (URISyntaxException ex) {
        // Fallback for URLs that are not valid URIs (should hardly ever happen).
        return new File(resourceUrl.getFile());
    }
}
/**
 * Determine whether the given URL points to a resource in a jar file,
 * that is, has protocol "jar", "zip", "wsjar" or "code-source".
 * <p>"zip" and "wsjar" are used by BEA WebLogic Server and IBM WebSphere, respectively,
 * but can be treated like jar files. The same applies to "code-source" URLs on Oracle
 * OC4J, provided that the path contains a jar separator.
 *
 * @param url the URL to check
 * @return whether the URL has been identified as a JAR URL
 */
public static boolean isJarURL(URL url) {
    String protocol = url.getProtocol();
    if (URL_PROTOCOL_JAR.equals(protocol)
            || URL_PROTOCOL_ZIP.equals(protocol)
            || URL_PROTOCOL_WSJAR.equals(protocol)) {
        return true;
    }
    // "code-source" only counts when the path carries the "!/" jar separator.
    return URL_PROTOCOL_CODE_SOURCE.equals(protocol) && url.getPath().contains(JAR_URL_SEPARATOR);
}
/**
 * Resolve the given resource URI to a <code>java.io.File</code>,
 * i.e. to a file in the file system.
 *
 * @param resourceUri the resource URI to resolve
 * @param description a description of the original resource that
 * the URI was created for (for example, a class path location)
 * @return a corresponding File object
 * @throws FileNotFoundException if the URL cannot be resolved to
 * a file in the file system
 */
public static File getFile(URI resourceUri, String description) throws FileNotFoundException {
    if (URL_PROTOCOL_FILE.equals(resourceUri.getScheme())) {
        return new File(resourceUri.getSchemeSpecificPart());
    }
    throw new FileNotFoundException(
            description + " cannot be resolved to absolute file path " +
            "because it does not reside in the file system: " + resourceUri);
}
/**
 * Resolve the given resource URI to a <code>java.io.File</code>,
 * i.e. to a file in the file system. Convenience overload that uses
 * "URI" as the error-message description.
 *
 * @param resourceUri the resource URI to resolve
 * @return a corresponding File object
 * @throws FileNotFoundException if the URL cannot be resolved to
 * a file in the file system
 */
public static File getFile(URI resourceUri) throws FileNotFoundException {
    return getFile(resourceUri, "URI");
}
/**
 * Create a URI instance for the given URL,
 * replacing spaces with "%20" quotes first.
 * <p>Furthermore, this method works on JDK 1.4 as well,
 * in contrast to the <code>URL.toURI()</code> method.
 *
 * @param url the URL to convert into a URI instance
 * @return the URI instance
 * @throws URISyntaxException if the URL wasn't a valid URI
 * @see java.net.URL#toURI()
 */
public static URI toURI(URL url) throws URISyntaxException {
    return toURI(url.toString());
}
/**
 * Determine whether the given URL points to a resource in the file system,
 * that is, has protocol "file" or "vfs".
 *
 * @param url the URL to check
 * @return whether the URL has been identified as a file system URL
 */
public static boolean isFileURL(URL url) {
    String protocol = url.getProtocol();
    if (URL_PROTOCOL_FILE.equals(protocol)) {
        return true;
    }
    // Covers "vfs" and its variants such as "vfszip".
    return protocol.startsWith(URL_PROTOCOL_VFS);
}
/**
 * Apply the given relative path to the given path,
 * assuming standard Java folder separation (i.e. "/" separators).
 *
 * @param path the path to start from (usually a full file path)
 * @param relativePath the relative path to apply
 * (relative to the full file path above)
 * @return the full file path that results from applying the relative path
 */
public static String applyRelativePath(String path, String relativePath) {
    int lastSeparator = path.lastIndexOf(FOLDER_SEPARATOR);
    if (lastSeparator == -1) {
        // Nothing to anchor against: the relative path stands on its own.
        return relativePath;
    }
    StringBuilder result = new StringBuilder(path.substring(0, lastSeparator));
    if (!relativePath.startsWith(FOLDER_SEPARATOR)) {
        result.append(FOLDER_SEPARATOR);
    }
    return result.append(relativePath).toString();
}
/**
 * Normalize the path by suppressing sequences like "path/.." and
 * inner simple dots.
 * <p>The result is convenient for path comparison. For other uses,
 * notice that Windows separators ("\") are replaced by simple slashes.
 *
 * @param path the original path (may be {@code null})
 * @return the normalized path, or {@code null} if the input was {@code null}
 */
public static String cleanPath(String path) {
    if (path == null) {
        return null;
    }
    // Normalise Windows separators to "/" before any analysis.
    String pathToUse = replace(path, WINDOWS_FOLDER_SEPARATOR, FOLDER_SEPARATOR);

    // Strip prefix from path to analyze, to not treat it as part of the
    // first path element. This is necessary to correctly parse paths like
    // "file:core/../core/io/Resource.class", where the ".." should just
    // strip the first "core" directory while keeping the "file:" prefix.
    int prefixIndex = pathToUse.indexOf(":");
    String prefix = "";
    if (prefixIndex != -1) {
        prefix = pathToUse.substring(0, prefixIndex + 1);
        pathToUse = pathToUse.substring(prefixIndex + 1);
    }
    // A leading "/" also belongs to the prefix, not the first element.
    if (pathToUse.startsWith(FOLDER_SEPARATOR)) {
        prefix = prefix + FOLDER_SEPARATOR;
        pathToUse = pathToUse.substring(1);
    }

    String[] pathArray = delimitedListToStringArray(pathToUse, FOLDER_SEPARATOR);
    List<String> pathElements = new LinkedList<>();
    int tops = 0;

    // Walk right-to-left so each ".." can cancel the element that precedes it.
    for (int i = pathArray.length - 1; i >= 0; i--) {
        String element = pathArray[i];
        if (CURRENT_PATH.equals(element)) {
            // Points to current directory - drop it.
        }
        else if (TOP_PATH.equals(element)) {
            // Registering top path found.
            tops++;
        }
        else {
            if (tops > 0) {
                // Merging path element with element corresponding to top path.
                tops--;
            }
            else {
                // Normal path element found.
                pathElements.add(0, element);
            }
        }
    }

    // Remaining top paths need to be retained (they step above the path root).
    for (int i = 0; i < tops; i++) {
        pathElements.add(0, TOP_PATH);
    }
    return prefix + collectionToDelimitedString(pathElements, FOLDER_SEPARATOR);
}
/** Joins the collection with {@code delim}, with no per-element prefix/suffix. */
private static String collectionToDelimitedString(Collection<?> coll, String delim) {
    return collectionToDelimitedString(coll, delim, "", "");
}
/**
 * Joins the collection into a single string: each element is wrapped in
 * {@code prefix}/{@code suffix} and elements are separated by {@code delim}.
 * A {@code null} or empty collection yields "".
 */
private static String collectionToDelimitedString(Collection<?> coll, String delim, String prefix, String suffix) {
    if (coll == null || coll.isEmpty()) {
        return "";
    }
    StringBuilder joined = new StringBuilder();
    boolean first = true;
    for (Object element : coll) {
        if (!first) {
            joined.append(delim);
        }
        joined.append(prefix).append(element).append(suffix);
        first = false;
    }
    return joined.toString();
}
/**
 * Take a String which is a delimited list and convert it to a String array.
 * <p>A single delimiter can consists of more than one character: It will still
 * be considered as single delimiter string, rather than as bunch of potential
 * delimiter characters - in contrast to <code>tokenizeToStringArray</code>.
 * Convenience overload with no character deletion.
 *
 * @param str the input String
 * @param delimiter the delimiter between elements (this is a single delimiter,
 * rather than a bunch individual delimiter characters)
 * @return an array of the tokens in the list
 */
private static String[] delimitedListToStringArray(String str, String delimiter) {
    return delimitedListToStringArray(str, delimiter, null);
}
/**
 * Take a String which is a delimited list and convert it to a String array.
 * <p>A single delimiter can consists of more than one character: It will still
 * be considered as single delimiter string, rather than as bunch of potential
 * delimiter characters - in contrast to <code>tokenizeToStringArray</code>.
 *
 * @param str the input String
 * @param delimiter the delimiter between elements (this is a single delimiter,
 * rather than a bunch individual delimiter characters)
 * @param charsToDelete a set of characters to delete. Useful for deleting unwanted
 * line breaks: e.g. "\r\n\f" will delete all new lines and line feeds in a String.
 * @return an array of the tokens in the list
 */
private static String[] delimitedListToStringArray(String str, String delimiter, String charsToDelete) {
    if (str == null) {
        return new String[0];
    }
    if (delimiter == null) {
        return new String[] {str};
    }
    List<String> tokens = new ArrayList<>();
    if ("".equals(delimiter)) {
        // Empty delimiter: split into individual characters.
        for (int i = 0; i < str.length(); i++) {
            tokens.add(deleteAny(str.substring(i, i + 1), charsToDelete));
        }
        return toStringArray(tokens);
    }
    int start = 0;
    for (int hit = str.indexOf(delimiter); hit != -1; hit = str.indexOf(delimiter, start)) {
        tokens.add(deleteAny(str.substring(start, hit), charsToDelete));
        start = hit + delimiter.length();
    }
    if (str.length() > 0 && start <= str.length()) {
        // Trailing token (possibly empty) - but never for an empty input string.
        tokens.add(deleteAny(str.substring(start), charsToDelete));
    }
    return toStringArray(tokens);
}
/** Copies the collection into a String array; {@code null} in, {@code null} out. */
private static String[] toStringArray(Collection<String> collection) {
    return (collection == null) ? null : collection.toArray(new String[0]);
}
/**
 * Removes every occurrence of any character of {@code charsToDelete} from
 * {@code inString}. Either argument being null/empty returns the input unchanged.
 */
private static String deleteAny(String inString, String charsToDelete) {
    if (!hasLength(inString) || !hasLength(charsToDelete)) {
        return inString;
    }
    StringBuilder kept = new StringBuilder(inString.length());
    for (int i = 0; i < inString.length(); i++) {
        char c = inString.charAt(i);
        if (charsToDelete.indexOf(c) < 0) {
            kept.append(c);
        }
    }
    return kept.toString();
}
/**
 * Replace all occurences of a substring within a string with
 * another string.
 *
 * @param inString String to examine
 * @param oldPattern String to replace
 * @param newPattern String to insert
 * @return a String with the replacements
 */
private static String replace(String inString, String oldPattern, String newPattern) {
    if (!hasLength(inString) || !hasLength(oldPattern) || newPattern == null) {
        return inString;
    }
    StringBuilder result = new StringBuilder();
    int copiedUpTo = 0;                          // position up to which inString was copied
    int hit = inString.indexOf(oldPattern);
    while (hit >= 0) {
        result.append(inString, copiedUpTo, hit).append(newPattern);
        copiedUpTo = hit + oldPattern.length();
        hit = inString.indexOf(oldPattern, copiedUpTo);
    }
    // Append any characters to the right of the last match.
    result.append(inString, copiedUpTo, inString.length());
    return result.toString();
}
/** Returns whether the sequence is non-null and non-empty. */
private static boolean hasLength(CharSequence str) {
    if (str == null) {
        return false;
    }
    return str.length() > 0;
}
/**
 * Extract the URL for the actual jar file from the given URL
 * (which may point to a resource in a jar file or to a jar file itself).
 *
 * @param jarUrl the original URL
 * @return the URL for the actual jar file
 * @throws MalformedURLException if no valid jar file URL could be extracted
 */
public static URL extractJarFileURL(URL jarUrl) throws MalformedURLException {
    String urlFile = jarUrl.getFile();
    int separatorIndex = urlFile.indexOf(JAR_URL_SEPARATOR);
    if (separatorIndex == -1) {
        // No "!/" separator: the URL already points at the jar file itself.
        return jarUrl;
    }
    String jarFile = urlFile.substring(0, separatorIndex);
    try {
        return new URL(jarFile);
    }
    catch (MalformedURLException ex) {
        // Probably no protocol in original jar URL, like "jar:C:/mypath/myjar.jar".
        // This usually indicates that the jar file resides in the file system.
        if (!jarFile.startsWith("/")) {
            jarFile = "/" + jarFile;
        }
        return new URL(FILE_URL_PREFIX + jarFile);
    }
}
/**
 * Create a URI instance for the given location String,
 * replacing spaces with "%20" quotes first.
 *
 * @param location the location String to convert into a URI instance
 * @return the URI instance
 * @throws URISyntaxException if the location wasn't a valid URI
 */
public static URI toURI(String location) throws URISyntaxException {
    String escaped = replace(location, " ", "%20");
    return new URI(escaped);
}
/**
 * Checks whether the specified path is a Grails path.
 *
 * <p>Results are memoised in the bounded {@code KNOWN_PATHS} cache.
 * NOTE(review): the cache is not synchronized; concurrent callers may race -
 * confirm this is only reached from single-threaded build code.
 *
 * @param path The path to check
 * @return true if it is a Grails path
 */
public static boolean isGrailsPath(String path) {
    // Single lookup instead of containsKey() + get(); cached values are only
    // ever Boolean.TRUE/FALSE, so null reliably means "not cached".
    Boolean cached = KNOWN_PATHS.get(path);
    if (cached != null) {
        return cached;
    }
    for (Pattern grailsAppResourcePattern : grailsAppResourcePatterns) {
        if (grailsAppResourcePattern.matcher(path).find()) {
            KNOWN_PATHS.put(path, true);
            return true;
        }
    }
    KNOWN_PATHS.put(path, false);
    return false;
}
/**
 * Checks whether the specified path matches any recognised Grails project
 * source root.
 *
 * @param path The path to check
 * @return true if it is a Grails project source path
 */
public static boolean isProjectSource(String path) {
    for (Pattern sourcePattern : patterns) {
        if (sourcePattern.matcher(path).find()) {
            return true;
        }
    }
    return false;
}
/**
 * Checks whether the specified resource is Grails project source.
 *
 * @param r The resource to check
 * @return true if it is a Grails project source file (or a plugin descriptor)
 */
public static boolean isProjectSource(Resource r) {
    try {
        String file = r.getURL().getFile();
        if (file.endsWith("GrailsPlugin.groovy")) {
            // Plugin descriptors count as project source even outside source roots.
            return true;
        }
        return isProjectSource(file);
    }
    catch (IOException e) {
        // Resource URL cannot be resolved - not project source.
        return false;
    }
}
/**
 * Checks whether the specific resources is a Grails resource. A Grails resource is a Groovy or Java class under the grails-app directory
 *
 * @param r The resource to check
 * @return True if it is a Grails resource
 */
public static boolean isGrailsResource(Resource r) {
    String file;
    try {
        file = r.getURL().getFile();
    }
    catch (IOException e) {
        // Resource URL cannot be resolved - not a Grails resource.
        return false;
    }
    // isGrailsPath is evaluated first so its memoisation cache is populated.
    return isGrailsPath(file) || file.endsWith("GrailsPlugin.groovy");
}
/**
 * Returns the "views" directory beneath the grails-app directory that
 * contains the given resource, or {@code null} when it cannot be resolved.
 */
public static Resource getViewsDir(Resource resource) {
    Resource appDir = (resource == null) ? null : getAppDir(resource);
    return (appDir == null) ? null : appDir.createRelative("views");
}
/**
 * Resolves the {@code grails-app} directory that contains the given resource.
 *
 * @param resource the resource to inspect (may be {@code null})
 * @return the {@code grails-app} directory as a resource, or {@code null}
 *         if it cannot be determined
 */
public static Resource getAppDir(Resource resource) {
    if (resource == null) return null;

    // Preferred route: walk up the file-system hierarchy to the grails-app folder.
    try {
        File file = resource.getFile();
        while (file != null && !file.getName().equals(GRAILS_APP_DIR)) {
            file = file.getParentFile();
        }
        if (file != null) {
            return new FileSystemResource(file.getAbsolutePath() + '/');
        }
    } catch (IOException e) {
        // Not a file-system resource; fall through to URL-based resolution.
    }

    // Fallback: truncate the resource URL just after the grails-app segment.
    try {
        String url = resource.getURL().toString();
        int i = url.lastIndexOf(GRAILS_APP_DIR);
        if (i > -1) {
            // Was a magic "i + 10"; derive from the constant so they cannot drift.
            url = url.substring(0, i + GRAILS_APP_DIR.length());
            return new UrlResource(url + '/');
        }
        return null;
    }
    catch (IOException e) {
        // Also covers MalformedURLException (an IOException subclass), which the
        // previous code caught in a redundant separate clause.
        return null;
    }
}
// Matches a plugin-supplied resource; group 1 is "/plugins/<name>/grails-app/...".
private static final Pattern PLUGIN_PATTERN = Pattern.compile(".+?(/plugins/.+?/" + GRAILS_APP_DIR + "/.+)");
/**
 * Takes a Grails resource (one located inside the grails-app dir) and gets its relative path inside the WEB-INF directory
 * when deployed.
 *
 * @param resource The Grails resource, which is a file inside the grails-app dir
 * @return The relative URL of the file inside the WEB-INF dir at deployment time or null if it cannot be established
 */
public static String getRelativeInsideWebInf(Resource resource) {
    if (resource == null) return null;

    String url;
    try {
        url = resource.getURL().toString();
    }
    catch (IOException e) {
        return null;
    }

    // Case 1: the URL already contains a WEB-INF segment.
    int i = url.indexOf(WEB_INF);
    if (i > -1) {
        return url.substring(i);
    }

    // Case 2: plugin resource - keep the plugins/<name>/grails-app prefix.
    Matcher m = PLUGIN_PATTERN.matcher(url);
    if (m.find()) {
        return WEB_INF + m.group(1);
    }

    // Case 3: plain application resource under grails-app.
    i = url.lastIndexOf(GRAILS_APP_DIR);
    if (i > -1) {
        return WEB_INF + "/" + url.substring(i);
    }
    return null;
}
// Matches a plugin-supplied resource; group 1 is "plugins/<name>" (no leading slash).
private static final Pattern PLUGIN_RESOURCE_PATTERN = Pattern.compile(".+?/(plugins/.+?)/" + GRAILS_APP_DIR + "/.+");
/**
 * Retrieves the static resource path for the given Grails resource artifact (controller/taglib etc.)
 *
 * @param resource The Resource
 * @param contextPath The additonal context path to prefix (may be {@code null})
 * @return The resource path
 */
public static String getStaticResourcePathForResource(Resource resource, String contextPath) {
    String prefix = (contextPath == null) ? "" : contextPath;
    if (resource == null) {
        return prefix;
    }
    String url;
    try {
        url = resource.getURL().toString();
    }
    catch (IOException e) {
        return prefix;
    }
    Matcher m = PLUGIN_RESOURCE_PATTERN.matcher(url);
    if (!m.find()) {
        // Not a plugin resource: only the context path applies.
        return prefix;
    }
    return prefix.isEmpty() ? m.group(1) : prefix + "/" + m.group(1);
}
/**
 * Get the path relative to an artefact folder under grails-app i.e:
 *
 * Input: /usr/joe/project/grails-app/conf/BootStrap.groovy
 * Output: BootStrap.groovy
 *
 * Input: /usr/joe/project/grails-app/domain/com/mystartup/Book.groovy
 * Output: com/mystartup/Book.groovy
 *
 * @param path The path to evaluate
 * @return The path relative to the root folder grails-app
 */
public static String getPathFromRoot(String path) {
    for (Pattern rootPattern : COMPILER_ROOT_PATTERNS) {
        Matcher m = rootPattern.matcher(path);
        if (m.find()) {
            // The relative path is the second-to-last group for the generic
            // grails-app pattern (the last group is the extension) and the ONLY
            // group for the single-group spring-scripts pattern. The previous
            // expression, group(groupCount() - 1), evaluated to group(0) - the
            // entire match - for the spring-scripts pattern and so returned the
            // absolute path instead of the script name.
            int relativePathGroup = Math.max(1, m.groupCount() - 1);
            return m.group(relativePathGroup);
        }
    }
    return null;
}
/**
 * Gets the path relative to the project base directory.
 *
 * Input: /usr/joe/project/grails-app/conf/BootStrap.groovy
 * Output: grails-app/conf/BootStrap.groovy
 *
 * NOTE(review): when the path contains "grails-app/" this method actually
 * returns the portion AFTER that marker (e.g. "conf/BootStrap.groovy"),
 * contradicting the example above, while the base-dir fallback below keeps
 * the "grails-app" segment. Behaviour is preserved here; confirm which
 * contract callers rely on before changing either branch.
 *
 * @param path The path
 * @return The path relative to the base directory or null if it can't be established
 */
public static String getPathFromBaseDir(String path) {
    String marker = GRAILS_APP_DIR + "/";
    int i = path.indexOf(marker);
    if (i > -1) {
        // Was a magic "i + 11"; derived from the marker length so they stay in sync.
        return path.substring(i + marker.length());
    }
    try {
        File baseDir = BuildSettings.BASE_DIR;
        String basePath = baseDir != null ? baseDir.getCanonicalPath() : null;
        if (basePath != null) {
            String canonicalPath = new File(path).getCanonicalPath();
            return canonicalPath.contains(basePath) ? canonicalPath.substring(basePath.length() + 1) : canonicalPath;
        }
    } catch (IOException e) {
        // Cannot canonicalise either path; treated as unresolvable.
    }
    return null;
}
/**
 * Takes a file path and returns the name of the folder under grails-app i.e:
 *
 * Input: /usr/joe/project/grails-app/domain/com/mystartup/Book.groovy
 * Output: domain
 *
 * @param path The path (may be {@code null})
 * @return The domain or null if not known
 */
public static String getArtefactDirectory(String path) {
    if (path == null) {
        return null;
    }
    // Group 1 of RESOURCE_PATH_PATTERN is the artefact directory name.
    Matcher matcher = RESOURCE_PATH_PATTERN.matcher(path);
    return matcher.find() ? matcher.group(1) : null;
}
/**
 * Takes any number of Strings and appends them into a uri, making
 * sure that a forward slash is inserted between each piece and
 * making sure that no duplicate slashes are in the uri
 *
 * <pre>
 * Input: ""
 * Output: ""
 *
 * Input: "/alpha", "/beta", "/gamma"
 * Output: "/alpha/beta/gamma"
 *
 * Input: "/alpha/", "/beta/", "/gamma"
 * Output: "/alpha/beta/gamma"
 *
 * Input: "/alpha/", "/beta/", "/gamma/"
 * Output: "/alpha/beta/gamma/"
 *
 * Input: "alpha", "beta", "gamma"
 * Output: "alpha/beta/gamma"
 * </pre>
 *
 * Null or empty pieces are skipped entirely. NOTE(review): a separator is
 * appended after a non-final piece even when all remaining pieces are
 * null/empty, e.g. ("a", null) yields "a/".
 *
 * @param pieces Strings to concatenate together into a uri
 * @return a uri
 */
public static String appendPiecesForUri(String... pieces) {
    if (pieces == null || pieces.length == 0) return "";

    // join parts && strip double slashes
    StringBuilder builder = new StringBuilder(16 * pieces.length);
    char previous = 0;     // last character emitted; 0 before anything is written
    for (int i = 0; i < pieces.length; i++) {
        String piece = pieces[i];
        if (piece != null && piece.length() > 0) {
            for (int j = 0, maxlen = piece.length(); j < maxlen; j++) {
                char current = piece.charAt(j);
                // Collapse duplicate slashes, including across piece boundaries.
                if (!(previous == '/' && current == '/')) {
                    builder.append(current);
                    previous = current;
                }
            }
            // Insert the separator between pieces unless one is already present.
            if (i + 1 < pieces.length && previous != '/') {
                builder.append('/');
                previous = '/';
            }
        }
    }
    return builder.toString();
}
/**
 * Instantiates the class configured under {@code configKey}, falling back to
 * {@code defaultClassName} when no config entry names one.
 *
 * @param config the configuration object (flattened before lookup)
 * @param configKey the key whose value may name the class to instantiate
 * @param defaultClassName the class used when the key is absent or not textual
 * @return a new instance of the resolved class
 */
@SuppressWarnings("unchecked")
public static Object instantiateFromConfig(ConfigObject config, String configKey, String defaultClassName)
        throws InstantiationException, IllegalAccessException, ClassNotFoundException, LinkageError {
    Map<String, Object> flattened = config.flatten();
    return instantiateFromFlatConfig(flattened, configKey, defaultClassName);
}
/**
 * Instantiates the class named under {@code configKey} in the flattened config,
 * falling back to {@code defaultClassName} when the entry is absent or not a
 * character sequence.
 *
 * @param flatConfig the flattened configuration map
 * @param configKey the key whose value (if a CharSequence) names the class
 * @param defaultClassName the class used when no textual entry is present
 * @return a new instance of the resolved class
 */
public static Object instantiateFromFlatConfig(Map<String, Object> flatConfig, String configKey, String defaultClassName)
        throws InstantiationException, IllegalAccessException, ClassNotFoundException, LinkageError {
    Object configuredName = flatConfig.get(configKey);
    String className = (configuredName instanceof CharSequence)
            ? configuredName.toString()
            : defaultClassName;
    return forName(className, DefaultResourceLoader.getDefaultClassLoader()).newInstance();
}
/**
 * Loads the class with the given name via the supplied class loader.
 *
 * @param className the fully qualified name of the class to load
 * @param defaultClassLoader the class loader used to resolve the class
 * @return the loaded {@code Class}
 * @throws ClassNotFoundException if the loader cannot find the class
 */
private static Class<?> forName(String className, ClassLoader defaultClassLoader) throws ClassNotFoundException {
    return defaultClassLoader.loadClass(className);
}
}
|
google/j2objc | 38,124 | jre_emul/android/platform/libcore/ojluni/src/main/java/java/time/chrono/Chronology.java | /*
* Copyright (c) 2012, 2016, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
/*
* This file is available under and governed by the GNU General Public
* License version 2 only, as published by the Free Software Foundation.
* However, the following notice accompanied the original version of this
* file:
*
* Copyright (c) 2012, Stephen Colebourne & Michael Nascimento Santos
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* * Neither the name of JSR-310 nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package java.time.chrono;
import static java.time.temporal.ChronoField.HOUR_OF_DAY;
import static java.time.temporal.ChronoField.MINUTE_OF_HOUR;
import static java.time.temporal.ChronoField.SECOND_OF_MINUTE;
import java.time.Clock;
import java.time.DateTimeException;
import java.time.Instant;
import java.time.LocalDate;
import java.time.LocalTime;
import java.time.ZoneId;
import java.time.ZoneOffset;
import java.time.format.DateTimeFormatterBuilder;
import java.time.format.ResolverStyle;
import java.time.format.TextStyle;
import java.time.temporal.ChronoField;
import java.time.temporal.TemporalAccessor;
import java.time.temporal.TemporalField;
import java.time.temporal.TemporalQueries;
import java.time.temporal.TemporalQuery;
import java.time.temporal.UnsupportedTemporalTypeException;
import java.time.temporal.ValueRange;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
/**
* A calendar system, used to organize and identify dates.
*
* <p>The main date and time API is built on the ISO calendar system. The chronology operates behind
* the scenes to represent the general concept of a calendar system. For example, the Japanese,
* Minguo, Thai Buddhist and others.
*
* <p>Most other calendar systems also operate on the shared concepts of year, month and day, linked
* to the cycles of the Earth around the Sun, and the Moon around the Earth. These shared concepts
* are defined by {@link ChronoField} and are available for use by any {@code Chronology}
* implementation:
*
* <pre>
* LocalDate isoDate = ...
* ThaiBuddhistDate thaiDate = ...
* int isoYear = isoDate.get(ChronoField.YEAR);
* int thaiYear = thaiDate.get(ChronoField.YEAR);
* </pre>
*
* As shown, although the date objects are in different calendar systems, represented by different
* {@code Chronology} instances, both can be queried using the same constant on {@code ChronoField}.
* For a full discussion of the implications of this, see {@link ChronoLocalDate}. In general, the
* advice is to use the known ISO-based {@code LocalDate}, rather than {@code ChronoLocalDate}.
*
* <p>While a {@code Chronology} object typically uses {@code ChronoField} and is based on an era,
* year-of-era, month-of-year, day-of-month model of a date, this is not required. A {@code
* Chronology} instance may represent a totally different kind of calendar system, such as the
* Mayan.
*
* <p>In practical terms, the {@code Chronology} instance also acts as a factory. The {@link
* #of(String)} method allows an instance to be looked up by identifier, while the {@link
* #ofLocale(Locale)} method allows lookup by locale.
*
* <p>The {@code Chronology} instance provides a set of methods to create {@code ChronoLocalDate}
* instances. The date classes are used to manipulate specific dates.
*
* <ul>
* <li>{@link #dateNow() dateNow()}
* <li>{@link #dateNow(Clock) dateNow(clock)}
* <li>{@link #dateNow(ZoneId) dateNow(zone)}
* <li>{@link #date(int, int, int) date(yearProleptic, month, day)}
* <li>{@link #date(Era, int, int, int) date(era, yearOfEra, month, day)}
* <li>{@link #dateYearDay(int, int) dateYearDay(yearProleptic, dayOfYear)}
* <li>{@link #dateYearDay(Era, int, int) dateYearDay(era, yearOfEra, dayOfYear)}
* <li>{@link #date(TemporalAccessor) date(TemporalAccessor)}
* </ul>
*
* <h3 id="addcalendars">Adding New Calendars</h3>
*
* The set of available chronologies can be extended by applications. Adding a new calendar system
* requires the writing of an implementation of {@code Chronology}, {@code ChronoLocalDate} and
* {@code Era}. The majority of the logic specific to the calendar system will be in the {@code
* ChronoLocalDate} implementation. The {@code Chronology} implementation acts as a factory.
*
* <p>To permit the discovery of additional chronologies, the {@link java.util.ServiceLoader
* ServiceLoader} is used. A file must be added to the {@code META-INF/services} directory with the
* name 'java.time.chrono.Chronology' listing the implementation classes. See the ServiceLoader for
* more details on service loading. For lookup by id or calendarType, the system provided calendars
* are found first followed by application provided calendars.
*
* <p>Each chronology must define a chronology ID that is unique within the system. If the
* chronology represents a calendar system defined by the CLDR specification then the calendar type
* is the concatenation of the CLDR type and, if applicable, the CLDR variant.
*
* @implSpec This interface must be implemented with care to ensure other classes operate correctly.
* All implementations that can be instantiated must be final, immutable and thread-safe.
* Subclasses should be Serializable wherever possible.
* @since 1.8
*/
public interface Chronology extends Comparable<Chronology> {
/**
 * Obtains an instance of {@code Chronology} from a temporal object.
 *
 * <p>This obtains a chronology based on the specified temporal. A {@code TemporalAccessor}
 * represents an arbitrary set of date and time information, which this factory converts to an
 * instance of {@code Chronology}.
 *
 * <p>The conversion queries the temporal using {@link TemporalQueries#chronology()}; when the
 * temporal carries no chronology, {@link IsoChronology} is returned.
 *
 * <p>This method matches the signature of the functional interface {@link TemporalQuery} allowing
 * it to be used as a query via method reference, {@code Chronology::from}.
 *
 * @param temporal the temporal to convert, not null
 * @return the chronology, not null
 * @throws DateTimeException if unable to convert to a {@code Chronology}
 */
static Chronology from(TemporalAccessor temporal) {
    Objects.requireNonNull(temporal, "temporal");
    Chronology chronology = temporal.query(TemporalQueries.chronology());
    // Fall back to ISO when the temporal does not expose a chronology.
    return chronology != null ? chronology : IsoChronology.INSTANCE;
}
// -----------------------------------------------------------------------
/**
 * Obtains an instance of {@code Chronology} from a locale.
 *
 * <p>This returns a {@code Chronology} based on the specified locale, typically returning {@code
 * IsoChronology}. Other calendar systems are only returned if they are explicitly selected within
 * the locale.
 *
 * <p>The {@link Locale} class provides access to a range of information useful for localizing an
 * application. This includes the language and region, such as "en-GB" for English as used in
 * Great Britain.
 *
 * <p>The {@code Locale} class also supports an extension mechanism that can be used to identify a
 * calendar system. The mechanism is a form of key-value pairs, where the calendar system has the
 * key "ca". For example, the locale "en-JP-u-ca-japanese" represents the English language as used
 * in Japan with the Japanese calendar system.
 *
 * <p>This method finds the desired calendar system in a manner equivalent to passing "ca" to
 * {@link Locale#getUnicodeLocaleType(String)}. If the "ca" key is not present, then {@code
 * IsoChronology} is returned.
 *
 * <p>Note that the behavior of this method differs from the older {@link
 * java.util.Calendar#getInstance(Locale)} method. If that method receives a locale of "th_TH" it
 * will return {@code BuddhistCalendar}. By contrast, this method will return {@code
 * IsoChronology}. Passing the locale "th-TH-u-ca-buddhist" into either method will result in the
 * Thai Buddhist calendar system and is therefore the recommended approach going forward for Thai
 * calendar system localization.
 *
 * <p>A similar, but simpler, situation occurs for the Japanese calendar system. The locale
 * "ja_JP_JP" has previously been used to access the calendar. However, unlike the Thai locale,
 * "ja_JP_JP" is automatically converted by {@code Locale} to the modern and recommended form of
 * "ja-JP-u-ca-japanese". Thus, there is no difference in behavior between this method and {@code
 * Calendar#getInstance(Locale)}.
 *
 * @param locale the locale to use to obtain the calendar system, not null
 * @return the calendar system associated with the locale, not null
 * @throws DateTimeException if the locale-specified calendar cannot be found
 */
static Chronology ofLocale(Locale locale) {
    return AbstractChronology.ofLocale(locale);
}
//-----------------------------------------------------------------------
/**
 * Obtains an instance of {@code Chronology} from a chronology ID or
 * calendar system type.
 * <p>
 * This returns a chronology based on either the ID or the type.
 * The {@link #getId() chronology ID} uniquely identifies the chronology.
 * The {@link #getCalendarType() calendar system type} is defined by the
 * CLDR specification.
 * <p>
 * The chronology may be a system chronology or a chronology
 * provided by the application via ServiceLoader configuration.
 * The system-provided chronologies are searched before any
 * application-provided chronologies.
 * <p>
 * Since some calendars can be customized, the ID or type typically refers
 * to the default customization. For example, the Gregorian calendar can have multiple
 * cutover dates from the Julian, but the lookup only provides the default cutover date.
 *
 * @param id the chronology ID or calendar system type, not null
 * @return the chronology with the identifier requested, not null
 * @throws DateTimeException if the chronology cannot be found
 */
static Chronology of(String id) {
    return AbstractChronology.of(id);
}
/**
 * Returns the available chronologies.
 * <p>
 * Each returned {@code Chronology} is available for use in the system.
 * The set of chronologies includes the system chronologies and
 * any chronologies provided by the application via ServiceLoader
 * configuration.
 *
 * @return the independent, modifiable set of the available chronologies, not null
 */
static Set<Chronology> getAvailableChronologies() {
    return AbstractChronology.getAvailableChronologies();
}
//-----------------------------------------------------------------------
/**
 * Gets the ID of the chronology.
 * <p>
 * The ID uniquely identifies the {@code Chronology}.
 * It can be used to lookup the {@code Chronology} using {@link #of(String)}.
 *
 * @return the chronology ID, not null
 * @see #getCalendarType()
 */
String getId();

/**
 * Gets the calendar type of the calendar system.
 *
 * <p>The calendar type is an identifier defined by the CLDR and <em>Unicode Locale Data Markup
 * Language (LDML)</em> specifications to uniquely identify a calendar. The calendar type is the
 * concatenation of the CLDR calendar type and, if applicable, the CLDR variant, separated by
 * "-". The calendar type is used to lookup the {@code Chronology} using {@link #of(String)}.
 *
 * @return the calendar system type, null if the calendar is not defined by CLDR/LDML
 * @see #getId()
 */
String getCalendarType();
//-----------------------------------------------------------------------
/**
 * Obtains a local date in this chronology from the era, year-of-era,
 * month-of-year and day-of-month fields.
 *
 * @implSpec
 * The default implementation combines the era and year-of-era into a proleptic
 * year before calling {@link #date(int, int, int)}.
 *
 * @param era the era of the correct type for the chronology, not null
 * @param yearOfEra the chronology year-of-era
 * @param month the chronology month-of-year
 * @param dayOfMonth the chronology day-of-month
 * @return the local date in this chronology, not null
 * @throws DateTimeException if unable to create the date
 * @throws ClassCastException if the {@code era} is not of the correct type for the chronology
 */
default ChronoLocalDate date(Era era, int yearOfEra, int month, int dayOfMonth) {
    // Convert the era-based year to a proleptic year, then delegate.
    return date(prolepticYear(era, yearOfEra), month, dayOfMonth);
}
/**
 * Obtains a local date in this chronology from the proleptic-year,
 * month-of-year and day-of-month fields.
 *
 * @param prolepticYear the chronology proleptic-year
 * @param month the chronology month-of-year
 * @param dayOfMonth the chronology day-of-month
 * @return the local date in this chronology, not null
 * @throws DateTimeException if unable to create the date
 * @see #date(Era, int, int, int)
 */
ChronoLocalDate date(int prolepticYear, int month, int dayOfMonth);
/**
 * Obtains a local date in this chronology from the era, year-of-era and
 * day-of-year fields.
 *
 * @implSpec
 * The default implementation combines the era and year-of-era into a proleptic
 * year before calling {@link #dateYearDay(int, int)}.
 *
 * @param era the era of the correct type for the chronology, not null
 * @param yearOfEra the chronology year-of-era
 * @param dayOfYear the chronology day-of-year
 * @return the local date in this chronology, not null
 * @throws DateTimeException if unable to create the date
 * @throws ClassCastException if the {@code era} is not of the correct type for the chronology
 */
default ChronoLocalDate dateYearDay(Era era, int yearOfEra, int dayOfYear) {
    // Convert the era-based year to a proleptic year, then delegate.
    return dateYearDay(prolepticYear(era, yearOfEra), dayOfYear);
}
/**
 * Obtains a local date in this chronology from the proleptic-year and
 * day-of-year fields.
 *
 * @param prolepticYear the chronology proleptic-year
 * @param dayOfYear the chronology day-of-year
 * @return the local date in this chronology, not null
 * @throws DateTimeException if unable to create the date
 * @see #dateYearDay(Era, int, int)
 */
ChronoLocalDate dateYearDay(int prolepticYear, int dayOfYear);

/**
 * Obtains a local date in this chronology from the epoch-day.
 * <p>
 * The definition of {@link ChronoField#EPOCH_DAY EPOCH_DAY} is the same
 * for all calendar systems, thus it can be used for conversion:
 * day zero of the count is 1970-01-01 (ISO).
 *
 * @param epochDay the epoch day
 * @return the local date in this chronology, not null
 * @throws DateTimeException if unable to create the date
 */
ChronoLocalDate dateEpochDay(long epochDay);
//-----------------------------------------------------------------------
/**
 * Obtains the current local date in this chronology from the system clock in the default time-zone.
 * <p>
 * This will query the {@link Clock#systemDefaultZone() system clock} in the default
 * time-zone to obtain the current date.
 * <p>
 * Using this method will prevent the ability to use an alternate clock for testing
 * because the clock is hard-coded.
 *
 * @implSpec
 * The default implementation invokes {@link #dateNow(Clock)}.
 *
 * @return the current local date using the system clock and default time-zone, not null
 * @throws DateTimeException if unable to create the date
 * @see #dateNow(ZoneId)
 * @see #dateNow(Clock)
 */
default ChronoLocalDate dateNow() {
    return dateNow(Clock.systemDefaultZone());
}
/**
 * Obtains the current local date in this chronology from the system clock in the specified time-zone.
 * <p>
 * This will query the {@link Clock#system(ZoneId) system clock} to obtain the current date.
 * Specifying the time-zone avoids dependence on the default time-zone.
 * <p>
 * Using this method will prevent the ability to use an alternate clock for testing
 * because the clock is hard-coded.
 *
 * @implSpec
 * The default implementation invokes {@link #dateNow(Clock)}.
 *
 * @param zone the zone ID to use, not null
 * @return the current local date using the system clock, not null
 * @throws DateTimeException if unable to create the date
 * @see #dateNow(Clock)
 */
default ChronoLocalDate dateNow(ZoneId zone) {
    return dateNow(Clock.system(zone));
}
/**
 * Obtains the current local date in this chronology from the specified clock.
 * <p>
 * This queries the given clock to obtain the current date - today.
 * Using this method allows the use of an alternate clock for testing.
 * The alternate clock may be introduced using {@link Clock dependency injection}.
 *
 * @implSpec
 * The default implementation invokes {@link #date(TemporalAccessor)}.
 *
 * @param clock the clock to use, not null
 * @return the current local date, not null
 * @throws DateTimeException if unable to create the date
 */
default ChronoLocalDate dateNow(Clock clock) {
    Objects.requireNonNull(clock, "clock");
    // Read today's ISO date from the clock, then convert into this chronology.
    LocalDate today = LocalDate.now(clock);
    return date(today);
}
//-----------------------------------------------------------------------
/**
 * Obtains a local date in this chronology from another temporal object.
 * <p>
 * This obtains a date in this chronology based on the specified temporal.
 * A {@code TemporalAccessor} represents an arbitrary set of date and time information,
 * which this factory converts to an instance of {@code ChronoLocalDate}.
 * <p>
 * The conversion typically uses the {@link ChronoField#EPOCH_DAY EPOCH_DAY}
 * field, which is standardized across calendar systems.
 * <p>
 * This method matches the signature of the functional interface {@link TemporalQuery}
 * allowing it to be used as a query via method reference, {@code aChronology::date}.
 *
 * @param temporal the temporal object to convert, not null
 * @return the local date in this chronology, not null
 * @throws DateTimeException if unable to create the date
 * @see ChronoLocalDate#from(TemporalAccessor)
 * @see #localDateTime(TemporalAccessor)
 */
ChronoLocalDate date(TemporalAccessor temporal);
/**
 * Obtains a local date-time in this chronology from another temporal object.
 * <p>
 * This obtains a date-time in this chronology based on the specified temporal.
 * A {@code TemporalAccessor} represents an arbitrary set of date and time information,
 * which this factory converts to an instance of {@code ChronoLocalDateTime}.
 * <p>
 * The conversion extracts the {@code ChronoLocalDate} via {@link #date(TemporalAccessor)}
 * and the {@code LocalTime} from the temporal object, then combines them.
 * Implementations are permitted to perform optimizations such as accessing
 * those fields that are equivalent to the relevant objects.
 * The result uses this chronology.
 * <p>
 * This method matches the signature of the functional interface {@link TemporalQuery}
 * allowing it to be used as a query via method reference, {@code aChronology::localDateTime}.
 *
 * @param temporal the temporal object to convert, not null
 * @return the local date-time in this chronology, not null
 * @throws DateTimeException if unable to create the date-time
 * @see ChronoLocalDateTime#from(TemporalAccessor)
 */
default ChronoLocalDateTime<? extends ChronoLocalDate> localDateTime(TemporalAccessor temporal) {
    try {
        // Extract the date first, then the time, and combine on the date so the
        // result keeps this chronology.
        ChronoLocalDate date = date(temporal);
        LocalTime time = LocalTime.from(temporal);
        return date.atTime(time);
    } catch (DateTimeException ex) {
        // Include the offending temporal type so conversion failures are diagnosable.
        throw new DateTimeException(
            "Unable to obtain ChronoLocalDateTime from TemporalAccessor: " + temporal.getClass(), ex);
    }
}
/**
 * Obtains a {@code ChronoZonedDateTime} in this chronology from another temporal object.
 * <p>
 * This obtains a zoned date-time in this chronology based on the specified temporal.
 * A {@code TemporalAccessor} represents an arbitrary set of date and time information,
 * which this factory converts to an instance of {@code ChronoZonedDateTime}.
 * <p>
 * The conversion will first obtain a {@code ZoneId} from the temporal object,
 * falling back to a {@code ZoneOffset} if necessary. It will then try to obtain
 * an {@code Instant}, falling back to a {@code ChronoLocalDateTime} if necessary.
 * The result will be either the combination of {@code ZoneId} or {@code ZoneOffset}
 * with {@code Instant} or {@code ChronoLocalDateTime}.
 * Implementations are permitted to perform optimizations such as accessing
 * those fields that are equivalent to the relevant objects.
 * The result uses this chronology.
 * <p>
 * This method matches the signature of the functional interface {@link TemporalQuery}
 * allowing it to be used as a query via method reference, {@code aChronology::zonedDateTime}.
 *
 * @param temporal the temporal object to convert, not null
 * @return the zoned date-time in this chronology, not null
 * @throws DateTimeException if unable to create the date-time
 * @see ChronoZonedDateTime#from(TemporalAccessor)
 */
default ChronoZonedDateTime<? extends ChronoLocalDate> zonedDateTime(TemporalAccessor temporal) {
    try {
        // A zone (or offset) is mandatory for any zoned date-time.
        ZoneId zone = ZoneId.from(temporal);
        try {
            // Preferred route: an exact instant combined with the zone.
            Instant instant = Instant.from(temporal);
            return zonedDateTime(instant, zone);
        } catch (DateTimeException ex1) {
            // Fallback: combine a local date-time with the zone; no preferred
            // offset is supplied (third argument null) to ofBest.
            ChronoLocalDateTimeImpl<?> cldt = ChronoLocalDateTimeImpl.ensureValid(this, localDateTime(temporal));
            return ChronoZonedDateTimeImpl.ofBest(cldt, zone, null);
        }
    } catch (DateTimeException ex) {
        // Re-wrap with the offending temporal type for easier diagnosis.
        throw new DateTimeException(
            "Unable to obtain ChronoZonedDateTime from TemporalAccessor: " + temporal.getClass(), ex);
    }
}
/**
 * Obtains a {@code ChronoZonedDateTime} in this chronology from an {@code Instant}.
 * <p>
 * This obtains a zoned date-time with the same instant as that specified.
 * The result uses this chronology.
 *
 * @param instant the instant to create the date-time from, not null
 * @param zone the time-zone, not null
 * @return the zoned date-time, not null
 * @throws DateTimeException if the result exceeds the supported range
 */
default ChronoZonedDateTime<? extends ChronoLocalDate> zonedDateTime(Instant instant, ZoneId zone) {
    return ChronoZonedDateTimeImpl.ofInstant(this, instant, zone);
}
// -----------------------------------------------------------------------
/**
 * Checks if the specified year is a leap year.
 *
 * <p>A leap-year is a year of a longer length than normal. The exact meaning is determined by the
 * chronology according to the following constraints.
 *
 * <ul>
 *   <li>a leap-year must imply a year-length longer than a non leap-year.
 *   <li>a chronology that does not support the concept of a year must return false.
 *   <li>the correct result must be returned for all years within the valid range of years for the
 *       chronology.
 * </ul>
 *
 * <p>Outside the range of valid years an implementation is free to return either a best guess or
 * false. An implementation must not throw an exception, even if the year is outside the range of
 * valid years.
 *
 * @param prolepticYear the proleptic-year to check, not validated for range
 * @return true if the year is a leap year
 * @see ChronoLocalDate#isLeapYear()
 */
boolean isLeapYear(long prolepticYear);

/**
 * Calculates the proleptic-year given the era and year-of-era.
 * <p>
 * This combines the era and year-of-era into the single proleptic-year field.
 * <p>
 * If the chronology makes active use of eras, such as {@code JapaneseChronology}
 * then the year-of-era will be validated against the era.
 * For other chronologies, validation is optional.
 *
 * @param era the era of the correct type for the chronology, not null
 * @param yearOfEra the chronology year-of-era
 * @return the proleptic-year
 * @throws DateTimeException if unable to convert to a proleptic-year,
 *  such as if the year is invalid for the era
 * @throws ClassCastException if the {@code era} is not of the correct type for the chronology
 * @see Era
 */
int prolepticYear(Era era, int yearOfEra);
/**
 * Creates the chronology era object from the numeric value.
 * <p>
 * The era is, conceptually, the largest division of the time-line.
 * Most calendar systems have a single epoch dividing the time-line into two eras.
 * However, some have multiple eras, such as one for the reign of each leader.
 * The exact meaning is determined by the chronology according to the following constraints.
 * <p>
 * The era in use at 1970-01-01 must have the value 1.
 * Later eras must have sequentially higher values.
 * Earlier eras must have sequentially lower values.
 * Each chronology must refer to an enum or similar singleton to provide the era values.
 * <p>
 * This method returns the singleton era of the correct type for the specified era value.
 *
 * @param eraValue the era value
 * @return the calendar system era, not null
 * @throws DateTimeException if unable to create the era
 * @see #eras()
 */
Era eraOf(int eraValue);

/**
 * Gets the list of eras for the chronology.
 * <p>
 * Most calendar systems have an era, within which the year has meaning.
 * If the calendar system does not support the concept of eras, an empty
 * list must be returned.
 *
 * @return the list of eras for the chronology, may be immutable, not null
 * @see #eraOf(int)
 */
List<Era> eras();

//-----------------------------------------------------------------------
/**
 * Gets the range of valid values for the specified field.
 * <p>
 * All fields can be expressed as a {@code long} integer.
 * This method returns an object that describes the valid range for that value.
 * <p>
 * Note that the result only describes the minimum and maximum valid values
 * and it is important not to read too much into them. For example, there
 * could be values within the range that are invalid for the field.
 * <p>
 * This method will return a result whether or not the chronology supports the field.
 *
 * @param field the field to get the range for, not null
 * @return the range of valid values for the field, not null
 * @throws DateTimeException if the range for the field cannot be obtained
 * @see ChronoField#range()
 */
ValueRange range(ChronoField field);
//-----------------------------------------------------------------------
/**
 * Gets the textual representation of this chronology.
 * <p>
 * This returns the textual name used to identify the chronology,
 * suitable for presentation to the user.
 * The parameters control the style of the returned text and the locale.
 *
 * @implSpec
 * The default implementation behaves as though the formatter was used to
 * format the chronology textual name.
 *
 * @param style the style of the text required, not null
 * @param locale the locale to use, not null
 * @return the text value of the chronology, not null
 */
default String getDisplayName(TextStyle style, Locale locale) {
    // Minimal TemporalAccessor whose only purpose is to surface this chronology
    // to the formatter; it supports no date/time fields.
    TemporalAccessor temporal = new TemporalAccessor() {
        @Override
        public boolean isSupported(TemporalField field) {
            return false; // no fields are available on this stub
        }
        @Override
        public long getLong(TemporalField field) {
            throw new UnsupportedTemporalTypeException("Unsupported field: " + field);
        }
        @SuppressWarnings("unchecked")
        @Override
        public <R> R query(TemporalQuery<R> query) {
            if (query == TemporalQueries.chronology()) {
                return (R) Chronology.this; // expose the enclosing chronology
            }
            return TemporalAccessor.super.query(query);
        }
    };
    return new DateTimeFormatterBuilder().appendChronologyText(style).toFormatter(locale).format(temporal);
}
//-----------------------------------------------------------------------
/**
 * Resolves parsed {@code ChronoField} values into a date during parsing.
 * <p>
 * Most {@code TemporalField} implementations are resolved using the
 * resolve method on the field. By contrast, the {@code ChronoField} class
 * defines fields that only have meaning relative to the chronology.
 * As such, {@code ChronoField} date fields are resolved here in the
 * context of a specific chronology.
 * <p>
 * The default implementation, which explains typical resolve behaviour,
 * is provided in {@link AbstractChronology}.
 *
 * @param fieldValues the map of fields to values, which can be updated, not null
 * @param resolverStyle the requested type of resolve, not null
 * @return the resolved date, null if insufficient information to create a date
 * @throws DateTimeException if the date cannot be resolved, typically
 *  because of a conflict in the input data
 * @see ResolverStyle
 */
ChronoLocalDate resolveDate(Map<TemporalField, Long> fieldValues, ResolverStyle resolverStyle);
//-----------------------------------------------------------------------
/**
 * Obtains a period for this chronology based on years, months and days.
 * <p>
 * This returns a period tied to this chronology using the specified
 * years, months and days. All supplied chronologies use periods
 * based on years, months and days, however the {@code ChronoPeriod} API
 * allows the period to be represented using other units.
 *
 * @implSpec
 * The default implementation returns an implementation class suitable
 * for most calendar systems. It is based solely on the three units.
 * Normalization, addition and subtraction derive the number of months
 * in a year from the {@link #range(ChronoField)}. If the number of
 * months within a year is fixed, then the calculation approach for
 * addition, subtraction and normalization is slightly different.
 * <p>
 * If implementing an unusual calendar system that is not based on
 * years, months and days, or where you want direct control, then
 * the {@code ChronoPeriod} interface must be directly implemented.
 * <p>
 * The returned period is immutable and thread-safe.
 *
 * @param years the number of years, may be negative
 * @param months the number of months, may be negative
 * @param days the number of days, may be negative
 * @return the period in terms of this chronology, not null
 */
default ChronoPeriod period(int years, int months, int days) {
    return new ChronoPeriodImpl(this, years, months, days);
}
// ---------------------------------------------------------------------
/**
* Gets the number of seconds from the epoch of 1970-01-01T00:00:00Z.
*
* <p>The number of seconds is calculated using the proleptic-year, month, day-of-month, hour,
* minute, second, and zoneOffset.
*
* @param prolepticYear the chronology proleptic-year
* @param month the chronology month-of-year
* @param dayOfMonth the chronology day-of-month
* @param hour the hour-of-day, from 0 to 23
* @param minute the minute-of-hour, from 0 to 59
* @param second the second-of-minute, from 0 to 59
* @param zoneOffset the zone offset, not null
* @return the number of seconds relative to 1970-01-01T00:00:00Z, may be negative
* @throws DateTimeException if any of the values are out of range
* @since 9
*/
public default long epochSecond(
int prolepticYear,
int month,
int dayOfMonth,
int hour,
int minute,
int second,
ZoneOffset zoneOffset) {
Objects.requireNonNull(zoneOffset, "zoneOffset");
HOUR_OF_DAY.checkValidValue(hour);
MINUTE_OF_HOUR.checkValidValue(minute);
SECOND_OF_MINUTE.checkValidValue(second);
long daysInSec = Math.multiplyExact(date(prolepticYear, month, dayOfMonth).toEpochDay(), 86400);
long timeinSec = (hour * 60 + minute) * 60 + second;
return Math.addExact(daysInSec, timeinSec - zoneOffset.getTotalSeconds());
}
/**
* Gets the number of seconds from the epoch of 1970-01-01T00:00:00Z.
*
* <p>The number of seconds is calculated using the era, year-of-era, month, day-of-month, hour,
* minute, second, and zoneOffset.
*
* @param era the era of the correct type for the chronology, not null
* @param yearOfEra the chronology year-of-era
* @param month the chronology month-of-year
* @param dayOfMonth the chronology day-of-month
* @param hour the hour-of-day, from 0 to 23
* @param minute the minute-of-hour, from 0 to 59
* @param second the second-of-minute, from 0 to 59
* @param zoneOffset the zone offset, not null
* @return the number of seconds relative to 1970-01-01T00:00:00Z, may be negative
* @throws DateTimeException if any of the values are out of range
* @since 9
*/
public default long epochSecond(
Era era,
int yearOfEra,
int month,
int dayOfMonth,
int hour,
int minute,
int second,
ZoneOffset zoneOffset) {
Objects.requireNonNull(era, "era");
return epochSecond(
prolepticYear(era, yearOfEra), month, dayOfMonth, hour, minute, second, zoneOffset);
}
  //-----------------------------------------------------------------------
  /**
   * Compares this chronology to another chronology.
   * <p>
   * The comparison is first by the chronology ID string, then by any
   * additional information specific to the subclass.
   * It is "consistent with equals", as defined by {@link Comparable}.
   *
   * @param other the other chronology to compare to, not null
   * @return the comparator value, negative if less, positive if greater
   */
  @Override
  int compareTo(Chronology other);

  /**
   * Checks if this chronology is equal to another chronology.
   * <p>
   * The comparison is based on the entire state of the object.
   *
   * @param obj the object to check, null returns false
   * @return true if this is equal to the other chronology
   */
  @Override
  boolean equals(Object obj);

  /**
   * A hash code for this chronology.
   * <p>
   * The hash code should be based on the entire state of the object,
   * consistent with {@link #equals(Object)}.
   *
   * @return a suitable hash code
   */
  @Override
  int hashCode();

  //-----------------------------------------------------------------------
  /**
   * Outputs this chronology as a {@code String}.
   * <p>
   * The format should include the entire state of the object.
   *
   * @return a string representation of this chronology, not null
   */
  @Override
  String toString();
}
|
googleapis/google-cloud-java | 37,700 | java-notebooks/proto-google-cloud-notebooks-v2/src/main/java/com/google/cloud/notebooks/v2/UpdateInstanceRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/notebooks/v2/service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.notebooks.v2;
/**
*
*
* <pre>
* Request for updating a notebook instance.
* </pre>
*
* Protobuf type {@code google.cloud.notebooks.v2.UpdateInstanceRequest}
*/
public final class UpdateInstanceRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.notebooks.v2.UpdateInstanceRequest)
UpdateInstanceRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use UpdateInstanceRequest.newBuilder() to construct.
private UpdateInstanceRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private UpdateInstanceRequest() {
requestId_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new UpdateInstanceRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.notebooks.v2.ServiceProto
.internal_static_google_cloud_notebooks_v2_UpdateInstanceRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.notebooks.v2.ServiceProto
.internal_static_google_cloud_notebooks_v2_UpdateInstanceRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.notebooks.v2.UpdateInstanceRequest.class,
com.google.cloud.notebooks.v2.UpdateInstanceRequest.Builder.class);
}
private int bitField0_;
public static final int INSTANCE_FIELD_NUMBER = 1;
private com.google.cloud.notebooks.v2.Instance instance_;
/**
*
*
* <pre>
* Required. A representation of an instance.
* </pre>
*
* <code>
* .google.cloud.notebooks.v2.Instance instance = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the instance field is set.
*/
@java.lang.Override
public boolean hasInstance() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. A representation of an instance.
* </pre>
*
* <code>
* .google.cloud.notebooks.v2.Instance instance = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The instance.
*/
@java.lang.Override
public com.google.cloud.notebooks.v2.Instance getInstance() {
return instance_ == null
? com.google.cloud.notebooks.v2.Instance.getDefaultInstance()
: instance_;
}
/**
*
*
* <pre>
* Required. A representation of an instance.
* </pre>
*
* <code>
* .google.cloud.notebooks.v2.Instance instance = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.cloud.notebooks.v2.InstanceOrBuilder getInstanceOrBuilder() {
return instance_ == null
? com.google.cloud.notebooks.v2.Instance.getDefaultInstance()
: instance_;
}
public static final int UPDATE_MASK_FIELD_NUMBER = 2;
private com.google.protobuf.FieldMask updateMask_;
/**
*
*
* <pre>
* Required. Mask used to update an instance
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the updateMask field is set.
*/
@java.lang.Override
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. Mask used to update an instance
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The updateMask.
*/
@java.lang.Override
public com.google.protobuf.FieldMask getUpdateMask() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
/**
*
*
* <pre>
* Required. Mask used to update an instance
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
public static final int REQUEST_ID_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private volatile java.lang.Object requestId_ = "";
/**
*
*
* <pre>
* Optional. Idempotent request UUID.
* </pre>
*
* <code>string request_id = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The requestId.
*/
@java.lang.Override
public java.lang.String getRequestId() {
java.lang.Object ref = requestId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
requestId_ = s;
return s;
}
}
/**
*
*
* <pre>
* Optional. Idempotent request UUID.
* </pre>
*
* <code>string request_id = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for requestId.
*/
@java.lang.Override
public com.google.protobuf.ByteString getRequestIdBytes() {
java.lang.Object ref = requestId_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
requestId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
  // Memoized initialization state: -1 = not yet computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;

  /** Always true once computed: this message declares no required fields to validate. */
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  /**
   * Serializes set fields to the wire: instance (field 1) and updateMask (field 2) only when their
   * presence bits are set, requestId (field 3) only when non-empty, then any unknown fields.
   */
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(1, getInstance());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      output.writeMessage(2, getUpdateMask());
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(requestId_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, requestId_);
    }
    getUnknownFields().writeTo(output);
  }
  /**
   * Computes the serialized byte size, mirroring {@code writeTo}; the result is memoized in
   * {@code memoizedSize} (-1 means not yet computed).
   */
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getInstance());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask());
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(requestId_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, requestId_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  /**
   * Value equality: two requests are equal when field presence, every set field value, and the
   * unknown field sets all match.
   */
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.notebooks.v2.UpdateInstanceRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.notebooks.v2.UpdateInstanceRequest other =
        (com.google.cloud.notebooks.v2.UpdateInstanceRequest) obj;
    if (hasInstance() != other.hasInstance()) return false;
    if (hasInstance()) {
      if (!getInstance().equals(other.getInstance())) return false;
    }
    if (hasUpdateMask() != other.hasUpdateMask()) return false;
    if (hasUpdateMask()) {
      if (!getUpdateMask().equals(other.getUpdateMask())) return false;
    }
    if (!getRequestId().equals(other.getRequestId())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  /**
   * Hash code consistent with {@link #equals(Object)}: folds in only the fields whose presence
   * bits are set, plus requestId and the unknown fields. Memoized after first computation
   * (0 means not yet computed).
   */
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasInstance()) {
      hash = (37 * hash) + INSTANCE_FIELD_NUMBER;
      hash = (53 * hash) + getInstance().hashCode();
    }
    if (hasUpdateMask()) {
      hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
      hash = (53 * hash) + getUpdateMask().hashCode();
    }
    hash = (37 * hash) + REQUEST_ID_FIELD_NUMBER;
    hash = (53 * hash) + getRequestId().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard protobuf parsing entry points over the supported input representations
  // (ByteBuffer, ByteString, byte[], InputStream, CodedInputStream), each with and without an
  // extension registry, plus length-delimited stream variants. All delegate to PARSER.
  public static com.google.cloud.notebooks.v2.UpdateInstanceRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.notebooks.v2.UpdateInstanceRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.notebooks.v2.UpdateInstanceRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.notebooks.v2.UpdateInstanceRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.notebooks.v2.UpdateInstanceRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.notebooks.v2.UpdateInstanceRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.notebooks.v2.UpdateInstanceRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.notebooks.v2.UpdateInstanceRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.notebooks.v2.UpdateInstanceRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.notebooks.v2.UpdateInstanceRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.notebooks.v2.UpdateInstanceRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.notebooks.v2.UpdateInstanceRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  /** Returns a fresh builder for this message type. */
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  /** Returns a builder initialized with default field values. */
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  /** Returns a builder pre-populated with the fields of {@code prototype}. */
  public static Builder newBuilder(com.google.cloud.notebooks.v2.UpdateInstanceRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  /** Returns a builder holding this message's current field values. */
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Request for updating a notebook instance.
* </pre>
*
* Protobuf type {@code google.cloud.notebooks.v2.UpdateInstanceRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.notebooks.v2.UpdateInstanceRequest)
com.google.cloud.notebooks.v2.UpdateInstanceRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.notebooks.v2.ServiceProto
.internal_static_google_cloud_notebooks_v2_UpdateInstanceRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.notebooks.v2.ServiceProto
.internal_static_google_cloud_notebooks_v2_UpdateInstanceRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.notebooks.v2.UpdateInstanceRequest.class,
com.google.cloud.notebooks.v2.UpdateInstanceRequest.Builder.class);
}
// Construct using com.google.cloud.notebooks.v2.UpdateInstanceRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getInstanceFieldBuilder();
getUpdateMaskFieldBuilder();
}
}
    /** Resets every field to its default, releases nested field builders, and clears presence bits. */
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      instance_ = null;
      if (instanceBuilder_ != null) {
        instanceBuilder_.dispose();
        instanceBuilder_ = null;
      }
      updateMask_ = null;
      if (updateMaskBuilder_ != null) {
        updateMaskBuilder_.dispose();
        updateMaskBuilder_ = null;
      }
      requestId_ = "";
      return this;
    }
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.notebooks.v2.ServiceProto
.internal_static_google_cloud_notebooks_v2_UpdateInstanceRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.notebooks.v2.UpdateInstanceRequest getDefaultInstanceForType() {
return com.google.cloud.notebooks.v2.UpdateInstanceRequest.getDefaultInstance();
}
    /**
     * Builds the message, throwing if it is not initialized (cannot happen here, as the message
     * has no required fields).
     */
    @java.lang.Override
    public com.google.cloud.notebooks.v2.UpdateInstanceRequest build() {
      com.google.cloud.notebooks.v2.UpdateInstanceRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    /** Builds the message without the initialization check. */
    @java.lang.Override
    public com.google.cloud.notebooks.v2.UpdateInstanceRequest buildPartial() {
      com.google.cloud.notebooks.v2.UpdateInstanceRequest result =
          new com.google.cloud.notebooks.v2.UpdateInstanceRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies each set field from the builder into result, translating the builder's presence bits
    // into the message's presence bits. Message fields prefer the nested builder when one exists.
    private void buildPartial0(com.google.cloud.notebooks.v2.UpdateInstanceRequest result) {
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.instance_ = instanceBuilder_ == null ? instance_ : instanceBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
        to_bitField0_ |= 0x00000002;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.requestId_ = requestId_;
      }
      result.bitField0_ |= to_bitField0_;
    }
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.notebooks.v2.UpdateInstanceRequest) {
return mergeFrom((com.google.cloud.notebooks.v2.UpdateInstanceRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
    /** Merges set fields from {@code other} into this builder; fields unset there are untouched. */
    public Builder mergeFrom(com.google.cloud.notebooks.v2.UpdateInstanceRequest other) {
      if (other == com.google.cloud.notebooks.v2.UpdateInstanceRequest.getDefaultInstance())
        return this;
      if (other.hasInstance()) {
        mergeInstance(other.getInstance());
      }
      if (other.hasUpdateMask()) {
        mergeUpdateMask(other.getUpdateMask());
      }
      if (!other.getRequestId().isEmpty()) {
        requestId_ = other.requestId_;
        bitField0_ |= 0x00000004;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    /** Always true: this message has no required fields. */
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    /**
     * Reads fields from the wire and merges them into this builder, dispatching on each tag
     * (10 = instance, 18 = update_mask, 26 = request_id); unrecognized fields are preserved as
     * unknown fields.
     *
     * @throws java.io.IOException if the stream is malformed or cannot be read
     */
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                input.readMessage(getInstanceFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            case 26:
              {
                requestId_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
private int bitField0_;
private com.google.cloud.notebooks.v2.Instance instance_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.notebooks.v2.Instance,
com.google.cloud.notebooks.v2.Instance.Builder,
com.google.cloud.notebooks.v2.InstanceOrBuilder>
instanceBuilder_;
/**
*
*
* <pre>
* Required. A representation of an instance.
* </pre>
*
* <code>
* .google.cloud.notebooks.v2.Instance instance = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the instance field is set.
*/
public boolean hasInstance() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. A representation of an instance.
* </pre>
*
* <code>
* .google.cloud.notebooks.v2.Instance instance = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The instance.
*/
public com.google.cloud.notebooks.v2.Instance getInstance() {
if (instanceBuilder_ == null) {
return instance_ == null
? com.google.cloud.notebooks.v2.Instance.getDefaultInstance()
: instance_;
} else {
return instanceBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. A representation of an instance.
* </pre>
*
* <code>
* .google.cloud.notebooks.v2.Instance instance = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setInstance(com.google.cloud.notebooks.v2.Instance value) {
if (instanceBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
instance_ = value;
} else {
instanceBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. A representation of an instance.
* </pre>
*
* <code>
* .google.cloud.notebooks.v2.Instance instance = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setInstance(com.google.cloud.notebooks.v2.Instance.Builder builderForValue) {
if (instanceBuilder_ == null) {
instance_ = builderForValue.build();
} else {
instanceBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. A representation of an instance.
* </pre>
*
* <code>
* .google.cloud.notebooks.v2.Instance instance = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    public Builder mergeInstance(com.google.cloud.notebooks.v2.Instance value) {
      if (instanceBuilder_ == null) {
        // Field held as a plain message: merge into the existing value when one is set and
        // non-default, otherwise adopt the incoming value directly.
        if (((bitField0_ & 0x00000001) != 0)
            && instance_ != null
            && instance_ != com.google.cloud.notebooks.v2.Instance.getDefaultInstance()) {
          getInstanceBuilder().mergeFrom(value);
        } else {
          instance_ = value;
        }
      } else {
        // Field held via a nested builder: delegate the merge to it.
        instanceBuilder_.mergeFrom(value);
      }
      if (instance_ != null) {
        bitField0_ |= 0x00000001;
        onChanged();
      }
      return this;
    }
/**
*
*
* <pre>
* Required. A representation of an instance.
* </pre>
*
* <code>
* .google.cloud.notebooks.v2.Instance instance = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearInstance() {
bitField0_ = (bitField0_ & ~0x00000001);
instance_ = null;
if (instanceBuilder_ != null) {
instanceBuilder_.dispose();
instanceBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. A representation of an instance.
* </pre>
*
* <code>
* .google.cloud.notebooks.v2.Instance instance = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.notebooks.v2.Instance.Builder getInstanceBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getInstanceFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. A representation of an instance.
* </pre>
*
* <code>
* .google.cloud.notebooks.v2.Instance instance = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.notebooks.v2.InstanceOrBuilder getInstanceOrBuilder() {
if (instanceBuilder_ != null) {
return instanceBuilder_.getMessageOrBuilder();
} else {
return instance_ == null
? com.google.cloud.notebooks.v2.Instance.getDefaultInstance()
: instance_;
}
}
/**
*
*
* <pre>
* Required. A representation of an instance.
* </pre>
*
* <code>
* .google.cloud.notebooks.v2.Instance instance = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.notebooks.v2.Instance,
com.google.cloud.notebooks.v2.Instance.Builder,
com.google.cloud.notebooks.v2.InstanceOrBuilder>
getInstanceFieldBuilder() {
if (instanceBuilder_ == null) {
instanceBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.notebooks.v2.Instance,
com.google.cloud.notebooks.v2.Instance.Builder,
com.google.cloud.notebooks.v2.InstanceOrBuilder>(
getInstance(), getParentForChildren(), isClean());
instance_ = null;
}
return instanceBuilder_;
}
private com.google.protobuf.FieldMask updateMask_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
updateMaskBuilder_;
/**
*
*
* <pre>
* Required. Mask used to update an instance
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the updateMask field is set.
*/
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. Mask used to update an instance
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The updateMask.
*/
public com.google.protobuf.FieldMask getUpdateMask() {
if (updateMaskBuilder_ == null) {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
} else {
return updateMaskBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. Mask used to update an instance
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
updateMask_ = value;
} else {
updateMaskBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Mask used to update an instance
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
if (updateMaskBuilder_ == null) {
updateMask_ = builderForValue.build();
} else {
updateMaskBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Mask used to update an instance
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
      if (updateMaskBuilder_ == null) {
        // Field held as a plain message: merge into the existing value when one is set and
        // non-default, otherwise adopt the incoming value directly.
        if (((bitField0_ & 0x00000002) != 0)
            && updateMask_ != null
            && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
          getUpdateMaskBuilder().mergeFrom(value);
        } else {
          updateMask_ = value;
        }
      } else {
        // Field held via a nested builder: delegate the merge to it.
        updateMaskBuilder_.mergeFrom(value);
      }
      if (updateMask_ != null) {
        bitField0_ |= 0x00000002;
        onChanged();
      }
      return this;
    }
/**
*
*
* <pre>
* Required. Mask used to update an instance
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearUpdateMask() {
bitField0_ = (bitField0_ & ~0x00000002);
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Mask used to update an instance
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getUpdateMaskFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. Mask used to update an instance
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
if (updateMaskBuilder_ != null) {
return updateMaskBuilder_.getMessageOrBuilder();
} else {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
}
}
/**
*
*
* <pre>
* Required. Mask used to update an instance
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
getUpdateMaskFieldBuilder() {
if (updateMaskBuilder_ == null) {
updateMaskBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>(
getUpdateMask(), getParentForChildren(), isClean());
updateMask_ = null;
}
return updateMaskBuilder_;
}
private java.lang.Object requestId_ = "";
/**
*
*
* <pre>
* Optional. Idempotent request UUID.
* </pre>
*
* <code>string request_id = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The requestId.
*/
public java.lang.String getRequestId() {
java.lang.Object ref = requestId_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
requestId_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Optional. Idempotent request UUID.
* </pre>
*
* <code>string request_id = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for requestId.
*/
    public com.google.protobuf.ByteString getRequestIdBytes() {
      java.lang.Object ref = requestId_;
      if (ref instanceof String) {
        // Field currently holds a String; encode to UTF-8 and cache the ByteString.
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        requestId_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
/**
*
*
* <pre>
* Optional. Idempotent request UUID.
* </pre>
*
* <code>string request_id = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The requestId to set.
* @return This builder for chaining.
*/
    public Builder setRequestId(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      requestId_ = value;
      bitField0_ |= 0x00000004; // bit 0x4 tracks presence of request_id (field 3)
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* Optional. Idempotent request UUID.
* </pre>
*
* <code>string request_id = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
    public Builder clearRequestId() {
      // Reset to the default instance's value and drop the presence bit.
      requestId_ = getDefaultInstance().getRequestId();
      bitField0_ = (bitField0_ & ~0x00000004);
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* Optional. Idempotent request UUID.
* </pre>
*
* <code>string request_id = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The bytes for requestId to set.
* @return This builder for chaining.
*/
    public Builder setRequestIdBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value); // proto3 string fields must be valid UTF-8
      requestId_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    // Unknown-field handling is delegated unchanged to the generated superclass.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.notebooks.v2.UpdateInstanceRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.notebooks.v2.UpdateInstanceRequest)
  // Shared immutable default instance; also serves as the prototype for builders.
  private static final com.google.cloud.notebooks.v2.UpdateInstanceRequest DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.notebooks.v2.UpdateInstanceRequest();
  }

  public static com.google.cloud.notebooks.v2.UpdateInstanceRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Parser that, on failure, attaches the partially-parsed message via
  // setUnfinishedMessage so callers can inspect what was read before the error.
  private static final com.google.protobuf.Parser<UpdateInstanceRequest> PARSER =
      new com.google.protobuf.AbstractParser<UpdateInstanceRequest>() {
        @java.lang.Override
        public UpdateInstanceRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O failures in the protobuf exception type expected by callers.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<UpdateInstanceRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<UpdateInstanceRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.notebooks.v2.UpdateInstanceRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/devtools/cloudbuild/v2/repositories.proto
// Protobuf Java Version: 3.25.8
package com.google.cloudbuild.v2;
/**
*
*
* <pre>
* Message for creating a Repository.
* </pre>
*
* Protobuf type {@code google.devtools.cloudbuild.v2.CreateRepositoryRequest}
*/
public final class CreateRepositoryRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.devtools.cloudbuild.v2.CreateRepositoryRequest)
CreateRepositoryRequestOrBuilder {
private static final long serialVersionUID = 0L;
  // Use CreateRepositoryRequest.newBuilder() to construct.
  private CreateRepositoryRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  private CreateRepositoryRequest() {
    // Initialize string fields to non-null defaults for the default instance.
    parent_ = "";
    repositoryId_ = "";
  }
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    // Invoked reflectively by the protobuf runtime to create fresh instances.
    return new CreateRepositoryRequest();
  }
  // Message descriptor and field-accessor wiring, generated from repositories.proto.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloudbuild.v2.RepositoryManagerProto
        .internal_static_google_devtools_cloudbuild_v2_CreateRepositoryRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloudbuild.v2.RepositoryManagerProto
        .internal_static_google_devtools_cloudbuild_v2_CreateRepositoryRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloudbuild.v2.CreateRepositoryRequest.class,
            com.google.cloudbuild.v2.CreateRepositoryRequest.Builder.class);
  }
  // Presence bits for singular message fields (0x1 = repository).
  private int bitField0_;
  public static final int PARENT_FIELD_NUMBER = 1;

  // Holds either a String or a lazily-decoded ByteString; see getParent().
  @SuppressWarnings("serial")
  private volatile java.lang.Object parent_ = "";

  /**
   *
   *
   * <pre>
   * Required. The connection to contain the repository. If the request is part
   * of a BatchCreateRepositoriesRequest, this field should be empty or match
   * the parent specified there.
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The parent.
   */
  @java.lang.Override
  public java.lang.String getParent() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Decode the wire-format ByteString once and cache the String form.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      parent_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * Required. The connection to contain the repository. If the request is part
   * of a BatchCreateRepositoriesRequest, this field should be empty or match
   * the parent specified there.
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The bytes for parent.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getParentBytes() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      // Encode to UTF-8 once and cache the ByteString form.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      parent_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int REPOSITORY_FIELD_NUMBER = 2;

  // Singular message field; presence tracked by bitField0_ bit 0x1.
  private com.google.cloudbuild.v2.Repository repository_;

  /**
   *
   *
   * <pre>
   * Required. The repository to create.
   * </pre>
   *
   * <code>
   * .google.devtools.cloudbuild.v2.Repository repository = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return Whether the repository field is set.
   */
  @java.lang.Override
  public boolean hasRepository() {
    return ((bitField0_ & 0x00000001) != 0);
  }

  /**
   *
   *
   * <pre>
   * Required. The repository to create.
   * </pre>
   *
   * <code>
   * .google.devtools.cloudbuild.v2.Repository repository = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return The repository.
   */
  @java.lang.Override
  public com.google.cloudbuild.v2.Repository getRepository() {
    // Never returns null: falls back to the immutable default instance when unset.
    return repository_ == null
        ? com.google.cloudbuild.v2.Repository.getDefaultInstance()
        : repository_;
  }

  /**
   *
   *
   * <pre>
   * Required. The repository to create.
   * </pre>
   *
   * <code>
   * .google.devtools.cloudbuild.v2.Repository repository = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  @java.lang.Override
  public com.google.cloudbuild.v2.RepositoryOrBuilder getRepositoryOrBuilder() {
    return repository_ == null
        ? com.google.cloudbuild.v2.Repository.getDefaultInstance()
        : repository_;
  }
  public static final int REPOSITORY_ID_FIELD_NUMBER = 3;

  // Holds either a String or a lazily-decoded ByteString; see getRepositoryId().
  @SuppressWarnings("serial")
  private volatile java.lang.Object repositoryId_ = "";

  /**
   *
   *
   * <pre>
   * Required. The ID to use for the repository, which will become the final
   * component of the repository's resource name. This ID should be unique in
   * the connection. Allows alphanumeric characters and any of
   * -._~%!$&amp;'()*+,;=&#64;.
   * </pre>
   *
   * <code>string repository_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The repositoryId.
   */
  @java.lang.Override
  public java.lang.String getRepositoryId() {
    java.lang.Object ref = repositoryId_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Decode the wire-format ByteString once and cache the String form.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      repositoryId_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * Required. The ID to use for the repository, which will become the final
   * component of the repository's resource name. This ID should be unique in
   * the connection. Allows alphanumeric characters and any of
   * -._~%!$&amp;'()*+,;=&#64;.
   * </pre>
   *
   * <code>string repository_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The bytes for repositoryId.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getRepositoryIdBytes() {
    java.lang.Object ref = repositoryId_;
    if (ref instanceof String) {
      // Encode to UTF-8 once and cache the ByteString form.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      repositoryId_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // -1 = not computed, 0 = not initialized, 1 = initialized.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // No required (proto2-style) fields to verify; memoize and report success.
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Fields are written in ascending field-number order; proto3 strings are
    // skipped when empty, and the message field only when its presence bit is set.
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(2, getRepository());
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(repositoryId_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, repositoryId_);
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // memoizedSize caches the result; -1 means "not yet computed".
    int size = memoizedSize;
    if (size != -1) return size;

    // Mirrors writeTo(): only fields that would be written contribute to the size.
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getRepository());
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(repositoryId_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, repositoryId_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloudbuild.v2.CreateRepositoryRequest)) {
      return super.equals(obj);
    }
    com.google.cloudbuild.v2.CreateRepositoryRequest other =
        (com.google.cloudbuild.v2.CreateRepositoryRequest) obj;

    // Field-by-field comparison; the message field compares presence first.
    if (!getParent().equals(other.getParent())) return false;
    if (hasRepository() != other.hasRepository()) return false;
    if (hasRepository()) {
      if (!getRepository().equals(other.getRepository())) return false;
    }
    if (!getRepositoryId().equals(other.getRepositoryId())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    // 0 doubles as the "not yet computed" sentinel for the memoized hash.
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    if (hasRepository()) {
      hash = (37 * hash) + REPOSITORY_FIELD_NUMBER;
      hash = (53 * hash) + getRepository().hashCode();
    }
    hash = (37 * hash) + REPOSITORY_ID_FIELD_NUMBER;
    hash = (53 * hash) + getRepositoryId().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points; all delegate to PARSER or to the
  // GeneratedMessageV3 helpers that adapt I/O exceptions for stream inputs.
  public static com.google.cloudbuild.v2.CreateRepositoryRequest parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloudbuild.v2.CreateRepositoryRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloudbuild.v2.CreateRepositoryRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloudbuild.v2.CreateRepositoryRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloudbuild.v2.CreateRepositoryRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloudbuild.v2.CreateRepositoryRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloudbuild.v2.CreateRepositoryRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloudbuild.v2.CreateRepositoryRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  // Delimited variants read a varint length prefix before the message payload.
  public static com.google.cloudbuild.v2.CreateRepositoryRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloudbuild.v2.CreateRepositoryRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloudbuild.v2.CreateRepositoryRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloudbuild.v2.CreateRepositoryRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Builder factory methods; all builders start from DEFAULT_INSTANCE.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(com.google.cloudbuild.v2.CreateRepositoryRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    // The default instance yields a fresh empty Builder; anything else is merged in.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Message for creating a Repository.
* </pre>
*
* Protobuf type {@code google.devtools.cloudbuild.v2.CreateRepositoryRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.devtools.cloudbuild.v2.CreateRepositoryRequest)
com.google.cloudbuild.v2.CreateRepositoryRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloudbuild.v2.RepositoryManagerProto
.internal_static_google_devtools_cloudbuild_v2_CreateRepositoryRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloudbuild.v2.RepositoryManagerProto
.internal_static_google_devtools_cloudbuild_v2_CreateRepositoryRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloudbuild.v2.CreateRepositoryRequest.class,
com.google.cloudbuild.v2.CreateRepositoryRequest.Builder.class);
}
// Construct using com.google.cloudbuild.v2.CreateRepositoryRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getRepositoryFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
parent_ = "";
repository_ = null;
if (repositoryBuilder_ != null) {
repositoryBuilder_.dispose();
repositoryBuilder_ = null;
}
repositoryId_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloudbuild.v2.RepositoryManagerProto
.internal_static_google_devtools_cloudbuild_v2_CreateRepositoryRequest_descriptor;
}
@java.lang.Override
public com.google.cloudbuild.v2.CreateRepositoryRequest getDefaultInstanceForType() {
return com.google.cloudbuild.v2.CreateRepositoryRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloudbuild.v2.CreateRepositoryRequest build() {
com.google.cloudbuild.v2.CreateRepositoryRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloudbuild.v2.CreateRepositoryRequest buildPartial() {
com.google.cloudbuild.v2.CreateRepositoryRequest result =
new com.google.cloudbuild.v2.CreateRepositoryRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloudbuild.v2.CreateRepositoryRequest result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.parent_ = parent_;
}
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.repository_ = repositoryBuilder_ == null ? repository_ : repositoryBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.repositoryId_ = repositoryId_;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloudbuild.v2.CreateRepositoryRequest) {
return mergeFrom((com.google.cloudbuild.v2.CreateRepositoryRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloudbuild.v2.CreateRepositoryRequest other) {
if (other == com.google.cloudbuild.v2.CreateRepositoryRequest.getDefaultInstance())
return this;
if (!other.getParent().isEmpty()) {
parent_ = other.parent_;
bitField0_ |= 0x00000001;
onChanged();
}
if (other.hasRepository()) {
mergeRepository(other.getRepository());
}
if (!other.getRepositoryId().isEmpty()) {
repositoryId_ = other.repositoryId_;
bitField0_ |= 0x00000004;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
parent_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
input.readMessage(getRepositoryFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
case 26:
{
repositoryId_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000004;
break;
} // case 26
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The connection to contain the repository. If the request is part
* of a BatchCreateRepositoriesRequest, this field should be empty or match
* the parent specified there.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The connection to contain the repository. If the request is part
* of a BatchCreateRepositoriesRequest, this field should be empty or match
* the parent specified there.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The connection to contain the repository. If the request is part
* of a BatchCreateRepositoriesRequest, this field should be empty or match
* the parent specified there.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The parent to set.
* @return This builder for chaining.
*/
public Builder setParent(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The connection to contain the repository. If the request is part
* of a BatchCreateRepositoriesRequest, this field should be empty or match
* the parent specified there.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearParent() {
parent_ = getDefaultInstance().getParent();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The connection to contain the repository. If the request is part
* of a BatchCreateRepositoriesRequest, this field should be empty or match
* the parent specified there.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for parent to set.
* @return This builder for chaining.
*/
public Builder setParentBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private com.google.cloudbuild.v2.Repository repository_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloudbuild.v2.Repository,
com.google.cloudbuild.v2.Repository.Builder,
com.google.cloudbuild.v2.RepositoryOrBuilder>
repositoryBuilder_;
/**
*
*
* <pre>
* Required. The repository to create.
* </pre>
*
* <code>
* .google.devtools.cloudbuild.v2.Repository repository = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the repository field is set.
*/
public boolean hasRepository() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. The repository to create.
* </pre>
*
* <code>
* .google.devtools.cloudbuild.v2.Repository repository = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The repository.
*/
public com.google.cloudbuild.v2.Repository getRepository() {
if (repositoryBuilder_ == null) {
return repository_ == null
? com.google.cloudbuild.v2.Repository.getDefaultInstance()
: repository_;
} else {
return repositoryBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. The repository to create.
* </pre>
*
* <code>
* .google.devtools.cloudbuild.v2.Repository repository = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setRepository(com.google.cloudbuild.v2.Repository value) {
if (repositoryBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
repository_ = value;
} else {
repositoryBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The repository to create.
* </pre>
*
* <code>
* .google.devtools.cloudbuild.v2.Repository repository = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setRepository(com.google.cloudbuild.v2.Repository.Builder builderForValue) {
if (repositoryBuilder_ == null) {
repository_ = builderForValue.build();
} else {
repositoryBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The repository to create.
* </pre>
*
* <code>
* .google.devtools.cloudbuild.v2.Repository repository = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeRepository(com.google.cloudbuild.v2.Repository value) {
if (repositoryBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)
&& repository_ != null
&& repository_ != com.google.cloudbuild.v2.Repository.getDefaultInstance()) {
getRepositoryBuilder().mergeFrom(value);
} else {
repository_ = value;
}
} else {
repositoryBuilder_.mergeFrom(value);
}
if (repository_ != null) {
bitField0_ |= 0x00000002;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. The repository to create.
* </pre>
*
* <code>
* .google.devtools.cloudbuild.v2.Repository repository = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearRepository() {
bitField0_ = (bitField0_ & ~0x00000002);
repository_ = null;
if (repositoryBuilder_ != null) {
repositoryBuilder_.dispose();
repositoryBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The repository to create.
* </pre>
*
* <code>
* .google.devtools.cloudbuild.v2.Repository repository = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloudbuild.v2.Repository.Builder getRepositoryBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getRepositoryFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. The repository to create.
* </pre>
*
* <code>
* .google.devtools.cloudbuild.v2.Repository repository = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloudbuild.v2.RepositoryOrBuilder getRepositoryOrBuilder() {
if (repositoryBuilder_ != null) {
return repositoryBuilder_.getMessageOrBuilder();
} else {
return repository_ == null
? com.google.cloudbuild.v2.Repository.getDefaultInstance()
: repository_;
}
}
/**
*
*
* <pre>
* Required. The repository to create.
* </pre>
*
* <code>
* .google.devtools.cloudbuild.v2.Repository repository = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloudbuild.v2.Repository,
com.google.cloudbuild.v2.Repository.Builder,
com.google.cloudbuild.v2.RepositoryOrBuilder>
getRepositoryFieldBuilder() {
if (repositoryBuilder_ == null) {
repositoryBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloudbuild.v2.Repository,
com.google.cloudbuild.v2.Repository.Builder,
com.google.cloudbuild.v2.RepositoryOrBuilder>(
getRepository(), getParentForChildren(), isClean());
repository_ = null;
}
return repositoryBuilder_;
}
// Backing store for the repository_id field. Holds either a java.lang.String or a
// com.google.protobuf.ByteString; the accessors below lazily convert between the two
// representations and cache the result back into this field.
private java.lang.Object repositoryId_ = "";
/**
*
*
* <pre>
* Required. The ID to use for the repository, which will become the final
* component of the repository's resource name. This ID should be unique in
* the connection. Allows alphanumeric characters and any of
* -._~%!$&'()*+,;=@.
* </pre>
*
* <code>string repository_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The repositoryId.
*/
public java.lang.String getRepositoryId() {
java.lang.Object ref = repositoryId_;
if (!(ref instanceof java.lang.String)) {
// First access after parsing: decode the UTF-8 bytes once and cache the String.
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
repositoryId_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The ID to use for the repository, which will become the final
* component of the repository's resource name. This ID should be unique in
* the connection. Allows alphanumeric characters and any of
* -._~%!$&'()*+,;=@.
* </pre>
*
* <code>string repository_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for repositoryId.
*/
public com.google.protobuf.ByteString getRepositoryIdBytes() {
java.lang.Object ref = repositoryId_;
if (ref instanceof String) {
// Encode the cached String to UTF-8 bytes once and cache the ByteString.
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
repositoryId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The ID to use for the repository, which will become the final
* component of the repository's resource name. This ID should be unique in
* the connection. Allows alphanumeric characters and any of
* -._~%!$&'()*+,;=@.
* </pre>
*
* <code>string repository_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The repositoryId to set.
* @return This builder for chaining.
*/
public Builder setRepositoryId(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
repositoryId_ = value;
// Mark bit 0x00000004 so buildPartial() copies this field into the message.
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The ID to use for the repository, which will become the final
* component of the repository's resource name. This ID should be unique in
* the connection. Allows alphanumeric characters and any of
* -._~%!$&'()*+,;=@.
* </pre>
*
* <code>string repository_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return This builder for chaining.
*/
public Builder clearRepositoryId() {
// Reset to the default instance's value and clear the has-bit.
repositoryId_ = getDefaultInstance().getRepositoryId();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The ID to use for the repository, which will become the final
* component of the repository's resource name. This ID should be unique in
* the connection. Allows alphanumeric characters and any of
* -._~%!$&'()*+,;=@.
* </pre>
*
* <code>string repository_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The bytes for repositoryId to set.
* @return This builder for chaining.
*/
public Builder setRepositoryIdBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
// Proto3 strings must be valid UTF-8; reject invalid bytes up front.
checkByteStringIsUtf8(value);
repositoryId_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
// Final overrides that simply delegate unknown-field handling to the superclass;
// generated so subclassing cannot change unknown-field semantics.
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.devtools.cloudbuild.v2.CreateRepositoryRequest)
}
// @@protoc_insertion_point(class_scope:google.devtools.cloudbuild.v2.CreateRepositoryRequest)
// Shared immutable default instance; also serves as the prototype for newBuilder().
private static final com.google.cloudbuild.v2.CreateRepositoryRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloudbuild.v2.CreateRepositoryRequest();
}
public static com.google.cloudbuild.v2.CreateRepositoryRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Parser that delegates to Builder.mergeFrom; on failure it attaches the partially
// parsed message to the thrown InvalidProtocolBufferException.
private static final com.google.protobuf.Parser<CreateRepositoryRequest> PARSER =
new com.google.protobuf.AbstractParser<CreateRepositoryRequest>() {
@java.lang.Override
public CreateRepositoryRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
// Wrap plain I/O failures so callers see a protobuf-specific exception type.
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<CreateRepositoryRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<CreateRepositoryRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloudbuild.v2.CreateRepositoryRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/discoveryengine/v1beta/site_search_engine.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.discoveryengine.v1beta;
/**
*
*
* <pre>
* A sitemap for the SiteSearchEngine.
* </pre>
*
* Protobuf type {@code google.cloud.discoveryengine.v1beta.Sitemap}
*/
public final class Sitemap extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.discoveryengine.v1beta.Sitemap)
SitemapOrBuilder {
private static final long serialVersionUID = 0L;
// Use Sitemap.newBuilder() to construct.
private Sitemap(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private Sitemap() {
name_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new Sitemap();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.discoveryengine.v1beta.SiteSearchEngineProto
.internal_static_google_cloud_discoveryengine_v1beta_Sitemap_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.discoveryengine.v1beta.SiteSearchEngineProto
.internal_static_google_cloud_discoveryengine_v1beta_Sitemap_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.discoveryengine.v1beta.Sitemap.class,
com.google.cloud.discoveryengine.v1beta.Sitemap.Builder.class);
}
// Presence bits for singular message fields (bit 0x00000001 = create_time).
private int bitField0_;
// Discriminator for the `feed` oneof: 0 = unset, 2 = uri.
private int feedCase_ = 0;
// Payload of the `feed` oneof; for case 2 it holds the uri as String or ByteString.
@SuppressWarnings("serial")
private java.lang.Object feed_;
// Type-safe view of the `feed` oneof discriminator.
public enum FeedCase
implements
com.google.protobuf.Internal.EnumLite,
com.google.protobuf.AbstractMessage.InternalOneOfEnum {
URI(2),
FEED_NOT_SET(0);
private final int value;
private FeedCase(int value) {
this.value = value;
}
/**
* @param value The number of the enum to look for.
* @return The enum associated with the given number.
* @deprecated Use {@link #forNumber(int)} instead.
*/
@java.lang.Deprecated
public static FeedCase valueOf(int value) {
return forNumber(value);
}
public static FeedCase forNumber(int value) {
switch (value) {
case 2:
return URI;
case 0:
return FEED_NOT_SET;
default:
// Unknown field number for this oneof.
return null;
}
}
public int getNumber() {
return this.value;
}
};
public FeedCase getFeedCase() {
return FeedCase.forNumber(feedCase_);
}
public static final int URI_FIELD_NUMBER = 2;
/**
*
*
* <pre>
* Public URI for the sitemap, e.g. `www.example.com/sitemap.xml`.
* </pre>
*
* <code>string uri = 2;</code>
*
* @return Whether the uri field is set.
*/
public boolean hasUri() {
return feedCase_ == 2;
}
/**
*
*
* <pre>
* Public URI for the sitemap, e.g. `www.example.com/sitemap.xml`.
* </pre>
*
* <code>string uri = 2;</code>
*
* @return The uri.
*/
public java.lang.String getUri() {
// Default to "" when the oneof is not in the uri case.
java.lang.Object ref = "";
if (feedCase_ == 2) {
ref = feed_;
}
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
// Lazily decode the ByteString and cache the String back into the oneof slot.
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (feedCase_ == 2) {
feed_ = s;
}
return s;
}
}
/**
*
*
* <pre>
* Public URI for the sitemap, e.g. `www.example.com/sitemap.xml`.
* </pre>
*
* <code>string uri = 2;</code>
*
* @return The bytes for uri.
*/
public com.google.protobuf.ByteString getUriBytes() {
java.lang.Object ref = "";
if (feedCase_ == 2) {
ref = feed_;
}
if (ref instanceof java.lang.String) {
// Lazily encode to UTF-8 and cache the ByteString back into the oneof slot.
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
if (feedCase_ == 2) {
feed_ = b;
}
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int NAME_FIELD_NUMBER = 1;
// Holds either a String or a ByteString; volatile so the lazily cached decode is
// safely published across threads.
@SuppressWarnings("serial")
private volatile java.lang.Object name_ = "";
/**
*
*
* <pre>
* Output only. The fully qualified resource name of the sitemap.
* `projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/sitemaps/*`
* The `sitemap_id` suffix is system-generated.
* </pre>
*
* <code>string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @return The name.
*/
@java.lang.Override
public java.lang.String getName() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
// Decode UTF-8 bytes once and cache the String.
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
name_ = s;
return s;
}
}
/**
*
*
* <pre>
* Output only. The fully qualified resource name of the sitemap.
* `projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/sitemaps/*`
* The `sitemap_id` suffix is system-generated.
* </pre>
*
* <code>string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @return The bytes for name.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
// Encode once and cache the ByteString.
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int CREATE_TIME_FIELD_NUMBER = 3;
private com.google.protobuf.Timestamp createTime_;
/**
*
*
* <pre>
* Output only. The sitemap's creation time.
* </pre>
*
* <code>.google.protobuf.Timestamp create_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return Whether the createTime field is set.
*/
@java.lang.Override
public boolean hasCreateTime() {
// Presence tracked via bit 0x00000001 rather than a null check.
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Output only. The sitemap's creation time.
* </pre>
*
* <code>.google.protobuf.Timestamp create_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return The createTime.
*/
@java.lang.Override
public com.google.protobuf.Timestamp getCreateTime() {
return createTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : createTime_;
}
/**
*
*
* <pre>
* Output only. The sitemap's creation time.
* </pre>
*
* <code>.google.protobuf.Timestamp create_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
@java.lang.Override
public com.google.protobuf.TimestampOrBuilder getCreateTimeOrBuilder() {
return createTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : createTime_;
}
// Memoized isInitialized() result: -1 = not computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
// Serializes set fields in field-number order: name (1), uri (2), create_time (3),
// then any unknown fields.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
}
if (feedCase_ == 2) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, feed_);
}
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(3, getCreateTime());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
// Size is memoized; -1 means not yet computed.
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
}
if (feedCase_ == 2) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, feed_);
}
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getCreateTime());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.discoveryengine.v1beta.Sitemap)) {
return super.equals(obj);
}
com.google.cloud.discoveryengine.v1beta.Sitemap other =
(com.google.cloud.discoveryengine.v1beta.Sitemap) obj;
if (!getName().equals(other.getName())) return false;
if (hasCreateTime() != other.hasCreateTime()) return false;
if (hasCreateTime()) {
if (!getCreateTime().equals(other.getCreateTime())) return false;
}
// Oneof: compare the active case first, then the value for that case.
if (!getFeedCase().equals(other.getFeedCase())) return false;
switch (feedCase_) {
case 2:
if (!getUri().equals(other.getUri())) return false;
break;
case 0:
default:
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + NAME_FIELD_NUMBER;
hash = (53 * hash) + getName().hashCode();
if (hasCreateTime()) {
hash = (37 * hash) + CREATE_TIME_FIELD_NUMBER;
hash = (53 * hash) + getCreateTime().hashCode();
}
switch (feedCase_) {
case 2:
hash = (37 * hash) + URI_FIELD_NUMBER;
hash = (53 * hash) + getUri().hashCode();
break;
case 0:
default:
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// Standard generated parseFrom overloads; all delegate to PARSER or the
// GeneratedMessageV3 parse helpers.
public static com.google.cloud.discoveryengine.v1beta.Sitemap parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.discoveryengine.v1beta.Sitemap parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.discoveryengine.v1beta.Sitemap parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.discoveryengine.v1beta.Sitemap parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.discoveryengine.v1beta.Sitemap parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.discoveryengine.v1beta.Sitemap parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.discoveryengine.v1beta.Sitemap parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.discoveryengine.v1beta.Sitemap parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.discoveryengine.v1beta.Sitemap parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.discoveryengine.v1beta.Sitemap parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.discoveryengine.v1beta.Sitemap parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.discoveryengine.v1beta.Sitemap parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.discoveryengine.v1beta.Sitemap prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
// The default instance returns a fresh Builder; otherwise pre-populate from this message.
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* A sitemap for the SiteSearchEngine.
* </pre>
*
* Protobuf type {@code google.cloud.discoveryengine.v1beta.Sitemap}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.discoveryengine.v1beta.Sitemap)
com.google.cloud.discoveryengine.v1beta.SitemapOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.discoveryengine.v1beta.SiteSearchEngineProto
.internal_static_google_cloud_discoveryengine_v1beta_Sitemap_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.discoveryengine.v1beta.SiteSearchEngineProto
.internal_static_google_cloud_discoveryengine_v1beta_Sitemap_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.discoveryengine.v1beta.Sitemap.class,
com.google.cloud.discoveryengine.v1beta.Sitemap.Builder.class);
}
// Construct using com.google.cloud.discoveryengine.v1beta.Sitemap.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
// Eagerly create nested field builders only when the runtime requests it.
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getCreateTimeFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
// Reset every field, presence bit, and the feed oneof to defaults.
super.clear();
bitField0_ = 0;
name_ = "";
createTime_ = null;
if (createTimeBuilder_ != null) {
createTimeBuilder_.dispose();
createTimeBuilder_ = null;
}
feedCase_ = 0;
feed_ = null;
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.discoveryengine.v1beta.SiteSearchEngineProto
.internal_static_google_cloud_discoveryengine_v1beta_Sitemap_descriptor;
}
@java.lang.Override
public com.google.cloud.discoveryengine.v1beta.Sitemap getDefaultInstanceForType() {
return com.google.cloud.discoveryengine.v1beta.Sitemap.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.discoveryengine.v1beta.Sitemap build() {
com.google.cloud.discoveryengine.v1beta.Sitemap result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.discoveryengine.v1beta.Sitemap buildPartial() {
com.google.cloud.discoveryengine.v1beta.Sitemap result =
new com.google.cloud.discoveryengine.v1beta.Sitemap(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
buildPartialOneofs(result);
onBuilt();
return result;
}
// Copies set singular fields from the builder into the message, translating
// builder presence bits (0x2/0x4) into the message's bit layout (0x1 for create_time).
private void buildPartial0(com.google.cloud.discoveryengine.v1beta.Sitemap result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.name_ = name_;
}
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000004) != 0)) {
result.createTime_ = createTimeBuilder_ == null ? createTime_ : createTimeBuilder_.build();
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
private void buildPartialOneofs(com.google.cloud.discoveryengine.v1beta.Sitemap result) {
result.feedCase_ = feedCase_;
result.feed_ = this.feed_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.discoveryengine.v1beta.Sitemap) {
return mergeFrom((com.google.cloud.discoveryengine.v1beta.Sitemap) other);
} else {
super.mergeFrom(other);
return this;
}
}
// Field-by-field merge: non-default scalars overwrite, messages merge recursively,
// and the active oneof case in `other` overwrites this builder's oneof.
public Builder mergeFrom(com.google.cloud.discoveryengine.v1beta.Sitemap other) {
if (other == com.google.cloud.discoveryengine.v1beta.Sitemap.getDefaultInstance())
return this;
if (!other.getName().isEmpty()) {
name_ = other.name_;
bitField0_ |= 0x00000002;
onChanged();
}
if (other.hasCreateTime()) {
mergeCreateTime(other.getCreateTime());
}
switch (other.getFeedCase()) {
case URI:
{
feedCase_ = 2;
feed_ = other.feed_;
onChanged();
break;
}
case FEED_NOT_SET:
{
break;
}
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
// Wire-format parse loop: dispatches on the tag (field number << 3 | wire type);
// tag 10 = name, 18 = uri, 26 = create_time.
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
name_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 10
case 18:
{
java.lang.String s = input.readStringRequireUtf8();
feedCase_ = 2;
feed_ = s;
break;
} // case 18
case 26:
{
input.readMessage(getCreateTimeFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000004;
break;
} // case 26
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
// Builder-side oneof state, mirroring the message's feedCase_/feed_ pair.
private int feedCase_ = 0;
private java.lang.Object feed_;
public FeedCase getFeedCase() {
return FeedCase.forNumber(feedCase_);
}
public Builder clearFeed() {
feedCase_ = 0;
feed_ = null;
onChanged();
return this;
}
// Builder presence bits (0x2 = name, 0x4 = create_time).
private int bitField0_;
/**
*
*
* <pre>
* Public URI for the sitemap, e.g. `www.example.com/sitemap.xml`.
* </pre>
*
* <code>string uri = 2;</code>
*
* @return Whether the uri field is set.
*/
@java.lang.Override
public boolean hasUri() {
return feedCase_ == 2;
}
/**
*
*
* <pre>
* Public URI for the sitemap, e.g. `www.example.com/sitemap.xml`.
* </pre>
*
* <code>string uri = 2;</code>
*
* @return The uri.
*/
@java.lang.Override
public java.lang.String getUri() {
java.lang.Object ref = "";
if (feedCase_ == 2) {
ref = feed_;
}
if (!(ref instanceof java.lang.String)) {
// Lazily decode the ByteString and cache the String back into the oneof slot.
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (feedCase_ == 2) {
feed_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Public URI for the sitemap, e.g. `www.example.com/sitemap.xml`.
* </pre>
*
* <code>string uri = 2;</code>
*
* @return The bytes for uri.
*/
@java.lang.Override
public com.google.protobuf.ByteString getUriBytes() {
java.lang.Object ref = "";
if (feedCase_ == 2) {
ref = feed_;
}
if (ref instanceof String) {
// Lazily encode to UTF-8 and cache the ByteString back into the oneof slot.
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
if (feedCase_ == 2) {
feed_ = b;
}
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Public URI for the sitemap, e.g. `www.example.com/sitemap.xml`.
* </pre>
*
* <code>string uri = 2;</code>
*
* @param value The uri to set.
* @return This builder for chaining.
*/
public Builder setUri(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
// Setting the value also selects the uri case of the feed oneof.
feedCase_ = 2;
feed_ = value;
onChanged();
return this;
}
/**
*
*
* <pre>
* Public URI for the sitemap, e.g. `www.example.com/sitemap.xml`.
* </pre>
*
* <code>string uri = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearUri() {
// Only clears when uri is the active oneof case; otherwise a no-op.
if (feedCase_ == 2) {
feedCase_ = 0;
feed_ = null;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Public URI for the sitemap, e.g. `www.example.com/sitemap.xml`.
* </pre>
*
* <code>string uri = 2;</code>
*
* @param value The bytes for uri to set.
* @return This builder for chaining.
*/
public Builder setUriBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
// Proto3 strings must be valid UTF-8; reject invalid bytes up front.
checkByteStringIsUtf8(value);
feedCase_ = 2;
feed_ = value;
onChanged();
return this;
}
// Backing store for name: holds either a String or a lazily-converted ByteString.
private java.lang.Object name_ = "";
/**
*
*
* <pre>
* Output only. The fully qualified resource name of the sitemap.
* `projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/sitemaps/*`
* The `sitemap_id` suffix is system-generated.
* </pre>
*
* <code>string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @return The name.
*/
public java.lang.String getName() {
java.lang.Object ref = name_;
if (!(ref instanceof java.lang.String)) {
// Decode UTF-8 bytes once and cache the String.
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
name_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Output only. The fully qualified resource name of the sitemap.
* `projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/sitemaps/*`
* The `sitemap_id` suffix is system-generated.
* </pre>
*
* <code>string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @return The bytes for name.
*/
public com.google.protobuf.ByteString getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof String) {
// Encode once and cache the ByteString.
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Output only. The fully qualified resource name of the sitemap.
* `projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/sitemaps/*`
* The `sitemap_id` suffix is system-generated.
* </pre>
*
* <code>string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @param value The name to set.
* @return This builder for chaining.
*/
public Builder setName(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
name_ = value;
// Mark bit 0x00000002 so buildPartial0() copies this field into the message.
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Output only. The fully qualified resource name of the sitemap.
* `projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/sitemaps/*`
* The `sitemap_id` suffix is system-generated.
* </pre>
*
* <code>string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @return This builder for chaining.
*/
public Builder clearName() {
// Reset to the default instance's value and clear the has-bit.
name_ = getDefaultInstance().getName();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* Output only. The fully qualified resource name of the sitemap.
* `projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/sitemaps/*`
* The `sitemap_id` suffix is system-generated.
* </pre>
*
* <code>string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @param value The bytes for name to set.
* @return This builder for chaining.
*/
public Builder setNameBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
// Proto3 strings must be valid UTF-8; reject invalid bytes up front.
checkByteStringIsUtf8(value);
name_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
// create_time is stored either directly (createTime_) or via a nested field
// builder (createTimeBuilder_); exactly one of the two is authoritative at a time.
private com.google.protobuf.Timestamp createTime_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Timestamp,
com.google.protobuf.Timestamp.Builder,
com.google.protobuf.TimestampOrBuilder>
createTimeBuilder_;
/**
*
*
* <pre>
* Output only. The sitemap's creation time.
* </pre>
*
* <code>
* .google.protobuf.Timestamp create_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return Whether the createTime field is set.
*/
public boolean hasCreateTime() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
*
*
* <pre>
* Output only. The sitemap's creation time.
* </pre>
*
* <code>
* .google.protobuf.Timestamp create_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return The createTime.
*/
public com.google.protobuf.Timestamp getCreateTime() {
if (createTimeBuilder_ == null) {
return createTime_ == null
? com.google.protobuf.Timestamp.getDefaultInstance()
: createTime_;
} else {
return createTimeBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Output only. The sitemap's creation time.
* </pre>
*
* <code>
* .google.protobuf.Timestamp create_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
public Builder setCreateTime(com.google.protobuf.Timestamp value) {
if (createTimeBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
createTime_ = value;
} else {
createTimeBuilder_.setMessage(value);
}
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Output only. The sitemap's creation time.
* </pre>
*
* <code>
* .google.protobuf.Timestamp create_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
public Builder setCreateTime(com.google.protobuf.Timestamp.Builder builderForValue) {
if (createTimeBuilder_ == null) {
createTime_ = builderForValue.build();
} else {
createTimeBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Output only. The sitemap's creation time.
* </pre>
*
* <code>
* .google.protobuf.Timestamp create_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
public Builder mergeCreateTime(com.google.protobuf.Timestamp value) {
if (createTimeBuilder_ == null) {
// Merge field-wise only when a non-default value is already present;
// otherwise simply adopt the incoming message.
if (((bitField0_ & 0x00000004) != 0)
&& createTime_ != null
&& createTime_ != com.google.protobuf.Timestamp.getDefaultInstance()) {
getCreateTimeBuilder().mergeFrom(value);
} else {
createTime_ = value;
}
} else {
createTimeBuilder_.mergeFrom(value);
}
if (createTime_ != null) {
bitField0_ |= 0x00000004;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Output only. The sitemap's creation time.
* </pre>
*
* <code>
* .google.protobuf.Timestamp create_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
public Builder clearCreateTime() {
bitField0_ = (bitField0_ & ~0x00000004);
createTime_ = null;
if (createTimeBuilder_ != null) {
createTimeBuilder_.dispose();
createTimeBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Output only. The sitemap's creation time.
* </pre>
*
* <code>
* .google.protobuf.Timestamp create_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
public com.google.protobuf.Timestamp.Builder getCreateTimeBuilder() {
// Handing out the nested builder implies the field is (or will be) set.
bitField0_ |= 0x00000004;
onChanged();
return getCreateTimeFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Output only. The sitemap's creation time.
* </pre>
*
* <code>
* .google.protobuf.Timestamp create_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
public com.google.protobuf.TimestampOrBuilder getCreateTimeOrBuilder() {
if (createTimeBuilder_ != null) {
return createTimeBuilder_.getMessageOrBuilder();
} else {
return createTime_ == null
? com.google.protobuf.Timestamp.getDefaultInstance()
: createTime_;
}
}
/**
*
*
* <pre>
* Output only. The sitemap's creation time.
* </pre>
*
* <code>
* .google.protobuf.Timestamp create_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Timestamp,
com.google.protobuf.Timestamp.Builder,
com.google.protobuf.TimestampOrBuilder>
getCreateTimeFieldBuilder() {
if (createTimeBuilder_ == null) {
// Lazily migrate from plain-field storage to builder storage; the builder
// takes ownership of the current value, so the plain field is nulled out.
createTimeBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Timestamp,
com.google.protobuf.Timestamp.Builder,
com.google.protobuf.TimestampOrBuilder>(
getCreateTime(), getParentForChildren(), isClean());
createTime_ = null;
}
return createTimeBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.discoveryengine.v1beta.Sitemap)
}
// @@protoc_insertion_point(class_scope:google.cloud.discoveryengine.v1beta.Sitemap)
private static final com.google.cloud.discoveryengine.v1beta.Sitemap DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.discoveryengine.v1beta.Sitemap();
}
public static com.google.cloud.discoveryengine.v1beta.Sitemap getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<Sitemap> PARSER =
new com.google.protobuf.AbstractParser<Sitemap>() {
@java.lang.Override
public Sitemap parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<Sitemap> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<Sitemap> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.discoveryengine.v1beta.Sitemap getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 37,991 | java-dataproc/google-cloud-dataproc/src/main/java/com/google/cloud/dataproc/v1/stub/HttpJsonSessionControllerStub.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.dataproc.v1.stub;
import static com.google.cloud.dataproc.v1.SessionControllerClient.ListSessionsPagedResponse;
import com.google.api.HttpRule;
import com.google.api.core.InternalApi;
import com.google.api.gax.core.BackgroundResource;
import com.google.api.gax.core.BackgroundResourceAggregation;
import com.google.api.gax.httpjson.ApiMethodDescriptor;
import com.google.api.gax.httpjson.HttpJsonCallSettings;
import com.google.api.gax.httpjson.HttpJsonOperationSnapshot;
import com.google.api.gax.httpjson.HttpJsonStubCallableFactory;
import com.google.api.gax.httpjson.ProtoMessageRequestFormatter;
import com.google.api.gax.httpjson.ProtoMessageResponseParser;
import com.google.api.gax.httpjson.ProtoRestSerializer;
import com.google.api.gax.httpjson.longrunning.stub.HttpJsonOperationsStub;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.OperationCallable;
import com.google.api.gax.rpc.RequestParamsBuilder;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.cloud.dataproc.v1.CreateSessionRequest;
import com.google.cloud.dataproc.v1.DeleteSessionRequest;
import com.google.cloud.dataproc.v1.GetSessionRequest;
import com.google.cloud.dataproc.v1.ListSessionsRequest;
import com.google.cloud.dataproc.v1.ListSessionsResponse;
import com.google.cloud.dataproc.v1.Session;
import com.google.cloud.dataproc.v1.SessionOperationMetadata;
import com.google.cloud.dataproc.v1.TerminateSessionRequest;
import com.google.common.collect.ImmutableMap;
import com.google.iam.v1.GetIamPolicyRequest;
import com.google.iam.v1.Policy;
import com.google.iam.v1.SetIamPolicyRequest;
import com.google.iam.v1.TestIamPermissionsRequest;
import com.google.iam.v1.TestIamPermissionsResponse;
import com.google.longrunning.Operation;
import com.google.protobuf.TypeRegistry;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import javax.annotation.Generated;
// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
* REST stub implementation for the SessionController service API.
*
* <p>This class is for advanced usage and reflects the underlying API directly.
*/
@Generated("by gapic-generator-java")
public class HttpJsonSessionControllerStub extends SessionControllerStub {
private static final TypeRegistry typeRegistry =
TypeRegistry.newBuilder()
.add(SessionOperationMetadata.getDescriptor())
.add(Session.getDescriptor())
.build();
private static final ApiMethodDescriptor<CreateSessionRequest, Operation>
createSessionMethodDescriptor =
ApiMethodDescriptor.<CreateSessionRequest, Operation>newBuilder()
.setFullMethodName("google.cloud.dataproc.v1.SessionController/CreateSession")
.setHttpMethod("POST")
.setType(ApiMethodDescriptor.MethodType.UNARY)
.setRequestFormatter(
ProtoMessageRequestFormatter.<CreateSessionRequest>newBuilder()
.setPath(
"/v1/{parent=projects/*/locations/*}/sessions",
request -> {
Map<String, String> fields = new HashMap<>();
ProtoRestSerializer<CreateSessionRequest> serializer =
ProtoRestSerializer.create();
serializer.putPathParam(fields, "parent", request.getParent());
return fields;
})
.setQueryParamsExtractor(
request -> {
Map<String, List<String>> fields = new HashMap<>();
ProtoRestSerializer<CreateSessionRequest> serializer =
ProtoRestSerializer.create();
serializer.putQueryParam(fields, "requestId", request.getRequestId());
serializer.putQueryParam(fields, "sessionId", request.getSessionId());
serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
return fields;
})
.setRequestBodyExtractor(
request ->
ProtoRestSerializer.create()
.toBody("session", request.getSession(), true))
.build())
.setResponseParser(
ProtoMessageResponseParser.<Operation>newBuilder()
.setDefaultInstance(Operation.getDefaultInstance())
.setDefaultTypeRegistry(typeRegistry)
.build())
.setOperationSnapshotFactory(
(CreateSessionRequest request, Operation response) ->
HttpJsonOperationSnapshot.create(response))
.build();
private static final ApiMethodDescriptor<GetSessionRequest, Session> getSessionMethodDescriptor =
ApiMethodDescriptor.<GetSessionRequest, Session>newBuilder()
.setFullMethodName("google.cloud.dataproc.v1.SessionController/GetSession")
.setHttpMethod("GET")
.setType(ApiMethodDescriptor.MethodType.UNARY)
.setRequestFormatter(
ProtoMessageRequestFormatter.<GetSessionRequest>newBuilder()
.setPath(
"/v1/{name=projects/*/locations/*/sessions/*}",
request -> {
Map<String, String> fields = new HashMap<>();
ProtoRestSerializer<GetSessionRequest> serializer =
ProtoRestSerializer.create();
serializer.putPathParam(fields, "name", request.getName());
return fields;
})
.setQueryParamsExtractor(
request -> {
Map<String, List<String>> fields = new HashMap<>();
ProtoRestSerializer<GetSessionRequest> serializer =
ProtoRestSerializer.create();
serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
return fields;
})
.setRequestBodyExtractor(request -> null)
.build())
.setResponseParser(
ProtoMessageResponseParser.<Session>newBuilder()
.setDefaultInstance(Session.getDefaultInstance())
.setDefaultTypeRegistry(typeRegistry)
.build())
.build();
private static final ApiMethodDescriptor<ListSessionsRequest, ListSessionsResponse>
listSessionsMethodDescriptor =
ApiMethodDescriptor.<ListSessionsRequest, ListSessionsResponse>newBuilder()
.setFullMethodName("google.cloud.dataproc.v1.SessionController/ListSessions")
.setHttpMethod("GET")
.setType(ApiMethodDescriptor.MethodType.UNARY)
.setRequestFormatter(
ProtoMessageRequestFormatter.<ListSessionsRequest>newBuilder()
.setPath(
"/v1/{parent=projects/*/locations/*}/sessions",
request -> {
Map<String, String> fields = new HashMap<>();
ProtoRestSerializer<ListSessionsRequest> serializer =
ProtoRestSerializer.create();
serializer.putPathParam(fields, "parent", request.getParent());
return fields;
})
.setQueryParamsExtractor(
request -> {
Map<String, List<String>> fields = new HashMap<>();
ProtoRestSerializer<ListSessionsRequest> serializer =
ProtoRestSerializer.create();
serializer.putQueryParam(fields, "filter", request.getFilter());
serializer.putQueryParam(fields, "pageSize", request.getPageSize());
serializer.putQueryParam(fields, "pageToken", request.getPageToken());
serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
return fields;
})
.setRequestBodyExtractor(request -> null)
.build())
.setResponseParser(
ProtoMessageResponseParser.<ListSessionsResponse>newBuilder()
.setDefaultInstance(ListSessionsResponse.getDefaultInstance())
.setDefaultTypeRegistry(typeRegistry)
.build())
.build();
private static final ApiMethodDescriptor<TerminateSessionRequest, Operation>
terminateSessionMethodDescriptor =
ApiMethodDescriptor.<TerminateSessionRequest, Operation>newBuilder()
.setFullMethodName("google.cloud.dataproc.v1.SessionController/TerminateSession")
.setHttpMethod("POST")
.setType(ApiMethodDescriptor.MethodType.UNARY)
.setRequestFormatter(
ProtoMessageRequestFormatter.<TerminateSessionRequest>newBuilder()
.setPath(
"/v1/{name=projects/*/locations/*/sessions/*}:terminate",
request -> {
Map<String, String> fields = new HashMap<>();
ProtoRestSerializer<TerminateSessionRequest> serializer =
ProtoRestSerializer.create();
serializer.putPathParam(fields, "name", request.getName());
return fields;
})
.setQueryParamsExtractor(
request -> {
Map<String, List<String>> fields = new HashMap<>();
ProtoRestSerializer<TerminateSessionRequest> serializer =
ProtoRestSerializer.create();
serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
return fields;
})
.setRequestBodyExtractor(
request ->
ProtoRestSerializer.create()
.toBody("*", request.toBuilder().clearName().build(), true))
.build())
.setResponseParser(
ProtoMessageResponseParser.<Operation>newBuilder()
.setDefaultInstance(Operation.getDefaultInstance())
.setDefaultTypeRegistry(typeRegistry)
.build())
.setOperationSnapshotFactory(
(TerminateSessionRequest request, Operation response) ->
HttpJsonOperationSnapshot.create(response))
.build();
private static final ApiMethodDescriptor<DeleteSessionRequest, Operation>
deleteSessionMethodDescriptor =
ApiMethodDescriptor.<DeleteSessionRequest, Operation>newBuilder()
.setFullMethodName("google.cloud.dataproc.v1.SessionController/DeleteSession")
.setHttpMethod("DELETE")
.setType(ApiMethodDescriptor.MethodType.UNARY)
.setRequestFormatter(
ProtoMessageRequestFormatter.<DeleteSessionRequest>newBuilder()
.setPath(
"/v1/{name=projects/*/locations/*/sessions/*}",
request -> {
Map<String, String> fields = new HashMap<>();
ProtoRestSerializer<DeleteSessionRequest> serializer =
ProtoRestSerializer.create();
serializer.putPathParam(fields, "name", request.getName());
return fields;
})
.setQueryParamsExtractor(
request -> {
Map<String, List<String>> fields = new HashMap<>();
ProtoRestSerializer<DeleteSessionRequest> serializer =
ProtoRestSerializer.create();
serializer.putQueryParam(fields, "requestId", request.getRequestId());
serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
return fields;
})
.setRequestBodyExtractor(request -> null)
.build())
.setResponseParser(
ProtoMessageResponseParser.<Operation>newBuilder()
.setDefaultInstance(Operation.getDefaultInstance())
.setDefaultTypeRegistry(typeRegistry)
.build())
.setOperationSnapshotFactory(
(DeleteSessionRequest request, Operation response) ->
HttpJsonOperationSnapshot.create(response))
.build();
private static final ApiMethodDescriptor<SetIamPolicyRequest, Policy>
setIamPolicyMethodDescriptor =
ApiMethodDescriptor.<SetIamPolicyRequest, Policy>newBuilder()
.setFullMethodName("google.iam.v1.IAMPolicy/SetIamPolicy")
.setHttpMethod("POST")
.setType(ApiMethodDescriptor.MethodType.UNARY)
.setRequestFormatter(
ProtoMessageRequestFormatter.<SetIamPolicyRequest>newBuilder()
.setPath(
"/v1/{resource=projects/*/regions/*/clusters/*}:setIamPolicy",
request -> {
Map<String, String> fields = new HashMap<>();
ProtoRestSerializer<SetIamPolicyRequest> serializer =
ProtoRestSerializer.create();
serializer.putPathParam(fields, "resource", request.getResource());
return fields;
})
.setAdditionalPaths(
"/v1/{resource=projects/*/regions/*/jobs/*}:setIamPolicy",
"/v1/{resource=projects/*/regions/*/operations/*}:setIamPolicy",
"/v1/{resource=projects/*/regions/*/workflowTemplates/*}:setIamPolicy",
"/v1/{resource=projects/*/locations/*/workflowTemplates/*}:setIamPolicy",
"/v1/{resource=projects/*/regions/*/autoscalingPolicies/*}:setIamPolicy",
"/v1/{resource=projects/*/locations/*/autoscalingPolicies/*}:setIamPolicy")
.setQueryParamsExtractor(
request -> {
Map<String, List<String>> fields = new HashMap<>();
ProtoRestSerializer<SetIamPolicyRequest> serializer =
ProtoRestSerializer.create();
serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
return fields;
})
.setRequestBodyExtractor(
request ->
ProtoRestSerializer.create()
.toBody("*", request.toBuilder().clearResource().build(), true))
.build())
.setResponseParser(
ProtoMessageResponseParser.<Policy>newBuilder()
.setDefaultInstance(Policy.getDefaultInstance())
.setDefaultTypeRegistry(typeRegistry)
.build())
.build();
private static final ApiMethodDescriptor<GetIamPolicyRequest, Policy>
getIamPolicyMethodDescriptor =
ApiMethodDescriptor.<GetIamPolicyRequest, Policy>newBuilder()
.setFullMethodName("google.iam.v1.IAMPolicy/GetIamPolicy")
.setHttpMethod("POST")
.setType(ApiMethodDescriptor.MethodType.UNARY)
.setRequestFormatter(
ProtoMessageRequestFormatter.<GetIamPolicyRequest>newBuilder()
.setPath(
"/v1/{resource=projects/*/regions/*/clusters/*}:getIamPolicy",
request -> {
Map<String, String> fields = new HashMap<>();
ProtoRestSerializer<GetIamPolicyRequest> serializer =
ProtoRestSerializer.create();
serializer.putPathParam(fields, "resource", request.getResource());
return fields;
})
.setAdditionalPaths(
"/v1/{resource=projects/*/regions/*/jobs/*}:getIamPolicy",
"/v1/{resource=projects/*/regions/*/operations/*}:getIamPolicy",
"/v1/{resource=projects/*/regions/*/workflowTemplates/*}:getIamPolicy",
"/v1/{resource=projects/*/locations/*/workflowTemplates/*}:getIamPolicy",
"/v1/{resource=projects/*/regions/*/autoscalingPolicies/*}:getIamPolicy",
"/v1/{resource=projects/*/locations/*/autoscalingPolicies/*}:getIamPolicy")
.setQueryParamsExtractor(
request -> {
Map<String, List<String>> fields = new HashMap<>();
ProtoRestSerializer<GetIamPolicyRequest> serializer =
ProtoRestSerializer.create();
serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
return fields;
})
.setRequestBodyExtractor(
request ->
ProtoRestSerializer.create()
.toBody("*", request.toBuilder().clearResource().build(), true))
.build())
.setResponseParser(
ProtoMessageResponseParser.<Policy>newBuilder()
.setDefaultInstance(Policy.getDefaultInstance())
.setDefaultTypeRegistry(typeRegistry)
.build())
.build();
private static final ApiMethodDescriptor<TestIamPermissionsRequest, TestIamPermissionsResponse>
testIamPermissionsMethodDescriptor =
ApiMethodDescriptor.<TestIamPermissionsRequest, TestIamPermissionsResponse>newBuilder()
.setFullMethodName("google.iam.v1.IAMPolicy/TestIamPermissions")
.setHttpMethod("POST")
.setType(ApiMethodDescriptor.MethodType.UNARY)
.setRequestFormatter(
ProtoMessageRequestFormatter.<TestIamPermissionsRequest>newBuilder()
.setPath(
"/v1/{resource=projects/*/regions/*/clusters/*}:testIamPermissions",
request -> {
Map<String, String> fields = new HashMap<>();
ProtoRestSerializer<TestIamPermissionsRequest> serializer =
ProtoRestSerializer.create();
serializer.putPathParam(fields, "resource", request.getResource());
return fields;
})
.setAdditionalPaths(
"/v1/{resource=projects/*/regions/*/jobs/*}:testIamPermissions",
"/v1/{resource=projects/*/regions/*/operations/*}:testIamPermissions",
"/v1/{resource=projects/*/regions/*/workflowTemplates/*}:testIamPermissions",
"/v1/{resource=projects/*/locations/*/workflowTemplates/*}:testIamPermissions",
"/v1/{resource=projects/*/regions/*/autoscalingPolicies/*}:testIamPermissions",
"/v1/{resource=projects/*/locations/*/autoscalingPolicies/*}:testIamPermissions")
.setQueryParamsExtractor(
request -> {
Map<String, List<String>> fields = new HashMap<>();
ProtoRestSerializer<TestIamPermissionsRequest> serializer =
ProtoRestSerializer.create();
serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
return fields;
})
.setRequestBodyExtractor(
request ->
ProtoRestSerializer.create()
.toBody("*", request.toBuilder().clearResource().build(), true))
.build())
.setResponseParser(
ProtoMessageResponseParser.<TestIamPermissionsResponse>newBuilder()
.setDefaultInstance(TestIamPermissionsResponse.getDefaultInstance())
.setDefaultTypeRegistry(typeRegistry)
.build())
.build();
private final UnaryCallable<CreateSessionRequest, Operation> createSessionCallable;
private final OperationCallable<CreateSessionRequest, Session, SessionOperationMetadata>
createSessionOperationCallable;
private final UnaryCallable<GetSessionRequest, Session> getSessionCallable;
private final UnaryCallable<ListSessionsRequest, ListSessionsResponse> listSessionsCallable;
private final UnaryCallable<ListSessionsRequest, ListSessionsPagedResponse>
listSessionsPagedCallable;
private final UnaryCallable<TerminateSessionRequest, Operation> terminateSessionCallable;
private final OperationCallable<TerminateSessionRequest, Session, SessionOperationMetadata>
terminateSessionOperationCallable;
private final UnaryCallable<DeleteSessionRequest, Operation> deleteSessionCallable;
private final OperationCallable<DeleteSessionRequest, Session, SessionOperationMetadata>
deleteSessionOperationCallable;
private final UnaryCallable<SetIamPolicyRequest, Policy> setIamPolicyCallable;
private final UnaryCallable<GetIamPolicyRequest, Policy> getIamPolicyCallable;
private final UnaryCallable<TestIamPermissionsRequest, TestIamPermissionsResponse>
testIamPermissionsCallable;
private final BackgroundResource backgroundResources;
private final HttpJsonOperationsStub httpJsonOperationsStub;
private final HttpJsonStubCallableFactory callableFactory;
public static final HttpJsonSessionControllerStub create(SessionControllerStubSettings settings)
throws IOException {
return new HttpJsonSessionControllerStub(settings, ClientContext.create(settings));
}
public static final HttpJsonSessionControllerStub create(ClientContext clientContext)
throws IOException {
return new HttpJsonSessionControllerStub(
SessionControllerStubSettings.newHttpJsonBuilder().build(), clientContext);
}
public static final HttpJsonSessionControllerStub create(
ClientContext clientContext, HttpJsonStubCallableFactory callableFactory) throws IOException {
return new HttpJsonSessionControllerStub(
SessionControllerStubSettings.newHttpJsonBuilder().build(), clientContext, callableFactory);
}
/**
* Constructs an instance of HttpJsonSessionControllerStub, using the given settings. This is
* protected so that it is easy to make a subclass, but otherwise, the static factory methods
* should be preferred.
*/
protected HttpJsonSessionControllerStub(
SessionControllerStubSettings settings, ClientContext clientContext) throws IOException {
this(settings, clientContext, new HttpJsonSessionControllerCallableFactory());
}
/**
* Constructs an instance of HttpJsonSessionControllerStub, using the given settings. This is
* protected so that it is easy to make a subclass, but otherwise, the static factory methods
* should be preferred.
*/
protected HttpJsonSessionControllerStub(
SessionControllerStubSettings settings,
ClientContext clientContext,
HttpJsonStubCallableFactory callableFactory)
throws IOException {
this.callableFactory = callableFactory;
this.httpJsonOperationsStub =
HttpJsonOperationsStub.create(
clientContext,
callableFactory,
typeRegistry,
ImmutableMap.<String, HttpRule>builder()
.put(
"google.longrunning.Operations.CancelOperation",
HttpRule.newBuilder()
.setPost("/v1/{name=projects/*/regions/*/operations/*}:cancel")
.addAdditionalBindings(
HttpRule.newBuilder()
.setPost("/v1/{name=projects/*/locations/*/operations/*}:cancel")
.build())
.build())
.put(
"google.longrunning.Operations.DeleteOperation",
HttpRule.newBuilder()
.setDelete("/v1/{name=projects/*/regions/*/operations/*}")
.addAdditionalBindings(
HttpRule.newBuilder()
.setDelete("/v1/{name=projects/*/locations/*/operations/*}")
.build())
.build())
.put(
"google.longrunning.Operations.GetOperation",
HttpRule.newBuilder()
.setGet("/v1/{name=projects/*/regions/*/operations/*}")
.addAdditionalBindings(
HttpRule.newBuilder()
.setGet("/v1/{name=projects/*/locations/*/operations/*}")
.build())
.build())
.put(
"google.longrunning.Operations.ListOperations",
HttpRule.newBuilder()
.setGet("/v1/{name=projects/*/regions/*/operations}")
.addAdditionalBindings(
HttpRule.newBuilder()
.setGet("/v1/{name=projects/*/locations/*/operations}")
.build())
.build())
.build());
HttpJsonCallSettings<CreateSessionRequest, Operation> createSessionTransportSettings =
HttpJsonCallSettings.<CreateSessionRequest, Operation>newBuilder()
.setMethodDescriptor(createSessionMethodDescriptor)
.setTypeRegistry(typeRegistry)
.setParamsExtractor(
request -> {
RequestParamsBuilder builder = RequestParamsBuilder.create();
builder.add("parent", String.valueOf(request.getParent()));
return builder.build();
})
.build();
HttpJsonCallSettings<GetSessionRequest, Session> getSessionTransportSettings =
HttpJsonCallSettings.<GetSessionRequest, Session>newBuilder()
.setMethodDescriptor(getSessionMethodDescriptor)
.setTypeRegistry(typeRegistry)
.setParamsExtractor(
request -> {
RequestParamsBuilder builder = RequestParamsBuilder.create();
builder.add("name", String.valueOf(request.getName()));
return builder.build();
})
.build();
HttpJsonCallSettings<ListSessionsRequest, ListSessionsResponse> listSessionsTransportSettings =
HttpJsonCallSettings.<ListSessionsRequest, ListSessionsResponse>newBuilder()
.setMethodDescriptor(listSessionsMethodDescriptor)
.setTypeRegistry(typeRegistry)
.setParamsExtractor(
request -> {
RequestParamsBuilder builder = RequestParamsBuilder.create();
builder.add("parent", String.valueOf(request.getParent()));
return builder.build();
})
.build();
HttpJsonCallSettings<TerminateSessionRequest, Operation> terminateSessionTransportSettings =
HttpJsonCallSettings.<TerminateSessionRequest, Operation>newBuilder()
.setMethodDescriptor(terminateSessionMethodDescriptor)
.setTypeRegistry(typeRegistry)
.setParamsExtractor(
request -> {
RequestParamsBuilder builder = RequestParamsBuilder.create();
builder.add("name", String.valueOf(request.getName()));
return builder.build();
})
.build();
HttpJsonCallSettings<DeleteSessionRequest, Operation> deleteSessionTransportSettings =
HttpJsonCallSettings.<DeleteSessionRequest, Operation>newBuilder()
.setMethodDescriptor(deleteSessionMethodDescriptor)
.setTypeRegistry(typeRegistry)
.setParamsExtractor(
request -> {
RequestParamsBuilder builder = RequestParamsBuilder.create();
builder.add("name", String.valueOf(request.getName()));
return builder.build();
})
.build();
HttpJsonCallSettings<SetIamPolicyRequest, Policy> setIamPolicyTransportSettings =
HttpJsonCallSettings.<SetIamPolicyRequest, Policy>newBuilder()
.setMethodDescriptor(setIamPolicyMethodDescriptor)
.setTypeRegistry(typeRegistry)
.setParamsExtractor(
request -> {
RequestParamsBuilder builder = RequestParamsBuilder.create();
builder.add("resource", String.valueOf(request.getResource()));
return builder.build();
})
.build();
HttpJsonCallSettings<GetIamPolicyRequest, Policy> getIamPolicyTransportSettings =
HttpJsonCallSettings.<GetIamPolicyRequest, Policy>newBuilder()
.setMethodDescriptor(getIamPolicyMethodDescriptor)
.setTypeRegistry(typeRegistry)
.setParamsExtractor(
request -> {
RequestParamsBuilder builder = RequestParamsBuilder.create();
builder.add("resource", String.valueOf(request.getResource()));
return builder.build();
})
.build();
HttpJsonCallSettings<TestIamPermissionsRequest, TestIamPermissionsResponse>
testIamPermissionsTransportSettings =
HttpJsonCallSettings.<TestIamPermissionsRequest, TestIamPermissionsResponse>newBuilder()
.setMethodDescriptor(testIamPermissionsMethodDescriptor)
.setTypeRegistry(typeRegistry)
.setParamsExtractor(
request -> {
RequestParamsBuilder builder = RequestParamsBuilder.create();
builder.add("resource", String.valueOf(request.getResource()));
return builder.build();
})
.build();
this.createSessionCallable =
callableFactory.createUnaryCallable(
createSessionTransportSettings, settings.createSessionSettings(), clientContext);
this.createSessionOperationCallable =
callableFactory.createOperationCallable(
createSessionTransportSettings,
settings.createSessionOperationSettings(),
clientContext,
httpJsonOperationsStub);
this.getSessionCallable =
callableFactory.createUnaryCallable(
getSessionTransportSettings, settings.getSessionSettings(), clientContext);
this.listSessionsCallable =
callableFactory.createUnaryCallable(
listSessionsTransportSettings, settings.listSessionsSettings(), clientContext);
this.listSessionsPagedCallable =
callableFactory.createPagedCallable(
listSessionsTransportSettings, settings.listSessionsSettings(), clientContext);
this.terminateSessionCallable =
callableFactory.createUnaryCallable(
terminateSessionTransportSettings, settings.terminateSessionSettings(), clientContext);
this.terminateSessionOperationCallable =
callableFactory.createOperationCallable(
terminateSessionTransportSettings,
settings.terminateSessionOperationSettings(),
clientContext,
httpJsonOperationsStub);
this.deleteSessionCallable =
callableFactory.createUnaryCallable(
deleteSessionTransportSettings, settings.deleteSessionSettings(), clientContext);
this.deleteSessionOperationCallable =
callableFactory.createOperationCallable(
deleteSessionTransportSettings,
settings.deleteSessionOperationSettings(),
clientContext,
httpJsonOperationsStub);
this.setIamPolicyCallable =
callableFactory.createUnaryCallable(
setIamPolicyTransportSettings, settings.setIamPolicySettings(), clientContext);
this.getIamPolicyCallable =
callableFactory.createUnaryCallable(
getIamPolicyTransportSettings, settings.getIamPolicySettings(), clientContext);
this.testIamPermissionsCallable =
callableFactory.createUnaryCallable(
testIamPermissionsTransportSettings,
settings.testIamPermissionsSettings(),
clientContext);
this.backgroundResources =
new BackgroundResourceAggregation(clientContext.getBackgroundResources());
}
@InternalApi
public static List<ApiMethodDescriptor> getMethodDescriptors() {
List<ApiMethodDescriptor> methodDescriptors = new ArrayList<>();
methodDescriptors.add(createSessionMethodDescriptor);
methodDescriptors.add(getSessionMethodDescriptor);
methodDescriptors.add(listSessionsMethodDescriptor);
methodDescriptors.add(terminateSessionMethodDescriptor);
methodDescriptors.add(deleteSessionMethodDescriptor);
methodDescriptors.add(setIamPolicyMethodDescriptor);
methodDescriptors.add(getIamPolicyMethodDescriptor);
methodDescriptors.add(testIamPermissionsMethodDescriptor);
return methodDescriptors;
}
public HttpJsonOperationsStub getHttpJsonOperationsStub() {
return httpJsonOperationsStub;
}
@Override
public UnaryCallable<CreateSessionRequest, Operation> createSessionCallable() {
return createSessionCallable;
}
@Override
public OperationCallable<CreateSessionRequest, Session, SessionOperationMetadata>
createSessionOperationCallable() {
return createSessionOperationCallable;
}
@Override
public UnaryCallable<GetSessionRequest, Session> getSessionCallable() {
return getSessionCallable;
}
@Override
public UnaryCallable<ListSessionsRequest, ListSessionsResponse> listSessionsCallable() {
return listSessionsCallable;
}
@Override
public UnaryCallable<ListSessionsRequest, ListSessionsPagedResponse> listSessionsPagedCallable() {
return listSessionsPagedCallable;
}
@Override
public UnaryCallable<TerminateSessionRequest, Operation> terminateSessionCallable() {
return terminateSessionCallable;
}
@Override
public OperationCallable<TerminateSessionRequest, Session, SessionOperationMetadata>
terminateSessionOperationCallable() {
return terminateSessionOperationCallable;
}
@Override
public UnaryCallable<DeleteSessionRequest, Operation> deleteSessionCallable() {
return deleteSessionCallable;
}
@Override
public OperationCallable<DeleteSessionRequest, Session, SessionOperationMetadata>
deleteSessionOperationCallable() {
return deleteSessionOperationCallable;
}
@Override
public UnaryCallable<SetIamPolicyRequest, Policy> setIamPolicyCallable() {
return setIamPolicyCallable;
}
@Override
public UnaryCallable<GetIamPolicyRequest, Policy> getIamPolicyCallable() {
return getIamPolicyCallable;
}
@Override
public UnaryCallable<TestIamPermissionsRequest, TestIamPermissionsResponse>
testIamPermissionsCallable() {
return testIamPermissionsCallable;
}
@Override
public final void close() {
try {
backgroundResources.close();
} catch (RuntimeException e) {
throw e;
} catch (Exception e) {
throw new IllegalStateException("Failed to close resource", e);
}
}
@Override
public void shutdown() {
backgroundResources.shutdown();
}
@Override
public boolean isShutdown() {
return backgroundResources.isShutdown();
}
@Override
public boolean isTerminated() {
return backgroundResources.isTerminated();
}
@Override
public void shutdownNow() {
backgroundResources.shutdownNow();
}
@Override
public boolean awaitTermination(long duration, TimeUnit unit) throws InterruptedException {
return backgroundResources.awaitTermination(duration, unit);
}
}
|
googleapis/google-cloud-java | 37,739 | java-aiplatform/proto-google-cloud-aiplatform-v1/src/main/java/com/google/cloud/aiplatform/v1/StreamQueryReasoningEngineRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/aiplatform/v1/reasoning_engine_execution_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.aiplatform.v1;
/**
 *
 *
 * <pre>
 * Request message for [ReasoningEngineExecutionService.StreamQuery][].
 * </pre>
 *
 * Protobuf type {@code google.cloud.aiplatform.v1.StreamQueryReasoningEngineRequest}
 */
// NOTE(review): protoc-generated class ("DO NOT EDIT" header above). Code is intentionally left
// byte-identical; only explanatory comments have been added. Any code change here would be lost
// on the next regeneration — change the .proto source instead.
public final class StreamQueryReasoningEngineRequest extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1.StreamQueryReasoningEngineRequest)
    StreamQueryReasoningEngineRequestOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use StreamQueryReasoningEngineRequest.newBuilder() to construct.
  private StreamQueryReasoningEngineRequest(
      com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private StreamQueryReasoningEngineRequest() {
    name_ = "";
    classMethod_ = "";
  }
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new StreamQueryReasoningEngineRequest();
  }
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.aiplatform.v1.ReasoningEngineExecutionServiceProto
        .internal_static_google_cloud_aiplatform_v1_StreamQueryReasoningEngineRequest_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.aiplatform.v1.ReasoningEngineExecutionServiceProto
        .internal_static_google_cloud_aiplatform_v1_StreamQueryReasoningEngineRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.aiplatform.v1.StreamQueryReasoningEngineRequest.class,
            com.google.cloud.aiplatform.v1.StreamQueryReasoningEngineRequest.Builder.class);
  }
  // Bit 0x00000001 records explicit presence of the optional `input` message field (see hasInput).
  private int bitField0_;
  public static final int NAME_FIELD_NUMBER = 1;
  @SuppressWarnings("serial")
  // Holds either a String or a ByteString; decoded lazily and cached as String on first getName().
  private volatile java.lang.Object name_ = "";
  /**
   *
   *
   * <pre>
   * Required. The name of the ReasoningEngine resource to use.
   * Format:
   * `projects/{project}/locations/{location}/reasoningEngines/{reasoning_engine}`
   * </pre>
   *
   * <code>
   * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The name.
   */
  @java.lang.Override
  public java.lang.String getName() {
    java.lang.Object ref = name_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      // Cache the decoded String so subsequent calls skip UTF-8 decoding.
      name_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Required. The name of the ReasoningEngine resource to use.
   * Format:
   * `projects/{project}/locations/{location}/reasoningEngines/{reasoning_engine}`
   * </pre>
   *
   * <code>
   * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The bytes for name.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getNameBytes() {
    java.lang.Object ref = name_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      name_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int INPUT_FIELD_NUMBER = 2;
  private com.google.protobuf.Struct input_;
  /**
   *
   *
   * <pre>
   * Optional. Input content provided by users in JSON object format. Examples
   * include text query, function calling parameters, media bytes, etc.
   * </pre>
   *
   * <code>.google.protobuf.Struct input = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return Whether the input field is set.
   */
  @java.lang.Override
  public boolean hasInput() {
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   *
   *
   * <pre>
   * Optional. Input content provided by users in JSON object format. Examples
   * include text query, function calling parameters, media bytes, etc.
   * </pre>
   *
   * <code>.google.protobuf.Struct input = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The input.
   */
  @java.lang.Override
  public com.google.protobuf.Struct getInput() {
    return input_ == null ? com.google.protobuf.Struct.getDefaultInstance() : input_;
  }
  /**
   *
   *
   * <pre>
   * Optional. Input content provided by users in JSON object format. Examples
   * include text query, function calling parameters, media bytes, etc.
   * </pre>
   *
   * <code>.google.protobuf.Struct input = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
   */
  @java.lang.Override
  public com.google.protobuf.StructOrBuilder getInputOrBuilder() {
    return input_ == null ? com.google.protobuf.Struct.getDefaultInstance() : input_;
  }
  public static final int CLASS_METHOD_FIELD_NUMBER = 3;
  @SuppressWarnings("serial")
  // Same lazy String/ByteString caching scheme as name_.
  private volatile java.lang.Object classMethod_ = "";
  /**
   *
   *
   * <pre>
   * Optional. Class method to be used for the stream query.
   * It is optional and defaults to "stream_query" if unspecified.
   * </pre>
   *
   * <code>string class_method = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The classMethod.
   */
  @java.lang.Override
  public java.lang.String getClassMethod() {
    java.lang.Object ref = classMethod_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      classMethod_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Optional. Class method to be used for the stream query.
   * It is optional and defaults to "stream_query" if unspecified.
   * </pre>
   *
   * <code>string class_method = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The bytes for classMethod.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getClassMethodBytes() {
    java.lang.Object ref = classMethod_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      classMethod_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // Memoized result of isInitialized(): -1 = not computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(2, getInput());
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(classMethod_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, classMethod_);
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getInput());
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(classMethod_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, classMethod_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.aiplatform.v1.StreamQueryReasoningEngineRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.aiplatform.v1.StreamQueryReasoningEngineRequest other =
        (com.google.cloud.aiplatform.v1.StreamQueryReasoningEngineRequest) obj;
    if (!getName().equals(other.getName())) return false;
    if (hasInput() != other.hasInput()) return false;
    if (hasInput()) {
      if (!getInput().equals(other.getInput())) return false;
    }
    if (!getClassMethod().equals(other.getClassMethod())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + NAME_FIELD_NUMBER;
    hash = (53 * hash) + getName().hashCode();
    if (hasInput()) {
      hash = (37 * hash) + INPUT_FIELD_NUMBER;
      hash = (53 * hash) + getInput().hashCode();
    }
    hash = (37 * hash) + CLASS_METHOD_FIELD_NUMBER;
    hash = (53 * hash) + getClassMethod().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  public static com.google.cloud.aiplatform.v1.StreamQueryReasoningEngineRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.aiplatform.v1.StreamQueryReasoningEngineRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.aiplatform.v1.StreamQueryReasoningEngineRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.aiplatform.v1.StreamQueryReasoningEngineRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.aiplatform.v1.StreamQueryReasoningEngineRequest parseFrom(
      byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.aiplatform.v1.StreamQueryReasoningEngineRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.aiplatform.v1.StreamQueryReasoningEngineRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.aiplatform.v1.StreamQueryReasoningEngineRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.aiplatform.v1.StreamQueryReasoningEngineRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.aiplatform.v1.StreamQueryReasoningEngineRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.aiplatform.v1.StreamQueryReasoningEngineRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.aiplatform.v1.StreamQueryReasoningEngineRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(
      com.google.cloud.aiplatform.v1.StreamQueryReasoningEngineRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * Request message for [ReasoningEngineExecutionService.StreamQuery][].
   * </pre>
   *
   * Protobuf type {@code google.cloud.aiplatform.v1.StreamQueryReasoningEngineRequest}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1.StreamQueryReasoningEngineRequest)
      com.google.cloud.aiplatform.v1.StreamQueryReasoningEngineRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.aiplatform.v1.ReasoningEngineExecutionServiceProto
          .internal_static_google_cloud_aiplatform_v1_StreamQueryReasoningEngineRequest_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.aiplatform.v1.ReasoningEngineExecutionServiceProto
          .internal_static_google_cloud_aiplatform_v1_StreamQueryReasoningEngineRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.aiplatform.v1.StreamQueryReasoningEngineRequest.class,
              com.google.cloud.aiplatform.v1.StreamQueryReasoningEngineRequest.Builder.class);
    }
    // Construct using com.google.cloud.aiplatform.v1.StreamQueryReasoningEngineRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getInputFieldBuilder();
      }
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      name_ = "";
      input_ = null;
      if (inputBuilder_ != null) {
        inputBuilder_.dispose();
        inputBuilder_ = null;
      }
      classMethod_ = "";
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.aiplatform.v1.ReasoningEngineExecutionServiceProto
          .internal_static_google_cloud_aiplatform_v1_StreamQueryReasoningEngineRequest_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.aiplatform.v1.StreamQueryReasoningEngineRequest
        getDefaultInstanceForType() {
      return com.google.cloud.aiplatform.v1.StreamQueryReasoningEngineRequest.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.cloud.aiplatform.v1.StreamQueryReasoningEngineRequest build() {
      com.google.cloud.aiplatform.v1.StreamQueryReasoningEngineRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.aiplatform.v1.StreamQueryReasoningEngineRequest buildPartial() {
      com.google.cloud.aiplatform.v1.StreamQueryReasoningEngineRequest result =
          new com.google.cloud.aiplatform.v1.StreamQueryReasoningEngineRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    private void buildPartial0(
        com.google.cloud.aiplatform.v1.StreamQueryReasoningEngineRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.name_ = name_;
      }
      // Builder bit 0x2 (input) maps to message bit 0x1 — the message only tracks `input` presence.
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.input_ = inputBuilder_ == null ? input_ : inputBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.classMethod_ = classMethod_;
      }
      result.bitField0_ |= to_bitField0_;
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.aiplatform.v1.StreamQueryReasoningEngineRequest) {
        return mergeFrom((com.google.cloud.aiplatform.v1.StreamQueryReasoningEngineRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    public Builder mergeFrom(
        com.google.cloud.aiplatform.v1.StreamQueryReasoningEngineRequest other) {
      if (other
          == com.google.cloud.aiplatform.v1.StreamQueryReasoningEngineRequest.getDefaultInstance())
        return this;
      if (!other.getName().isEmpty()) {
        name_ = other.name_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (other.hasInput()) {
        mergeInput(other.getInput());
      }
      if (!other.getClassMethod().isEmpty()) {
        classMethod_ = other.classMethod_;
        bitField0_ |= 0x00000004;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                name_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                input.readMessage(getInputFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            case 26:
              {
                classMethod_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Builder presence bits: 0x1 = name set, 0x2 = input set, 0x4 = classMethod set.
    private int bitField0_;
    private java.lang.Object name_ = "";
    /**
     *
     *
     * <pre>
     * Required. The name of the ReasoningEngine resource to use.
     * Format:
     * `projects/{project}/locations/{location}/reasoningEngines/{reasoning_engine}`
     * </pre>
     *
     * <code>
     * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The name.
     */
    public java.lang.String getName() {
      java.lang.Object ref = name_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        name_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The name of the ReasoningEngine resource to use.
     * Format:
     * `projects/{project}/locations/{location}/reasoningEngines/{reasoning_engine}`
     * </pre>
     *
     * <code>
     * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The bytes for name.
     */
    public com.google.protobuf.ByteString getNameBytes() {
      java.lang.Object ref = name_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        name_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The name of the ReasoningEngine resource to use.
     * Format:
     * `projects/{project}/locations/{location}/reasoningEngines/{reasoning_engine}`
     * </pre>
     *
     * <code>
     * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The name to set.
     * @return This builder for chaining.
     */
    public Builder setName(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      name_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The name of the ReasoningEngine resource to use.
     * Format:
     * `projects/{project}/locations/{location}/reasoningEngines/{reasoning_engine}`
     * </pre>
     *
     * <code>
     * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearName() {
      name_ = getDefaultInstance().getName();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The name of the ReasoningEngine resource to use.
     * Format:
     * `projects/{project}/locations/{location}/reasoningEngines/{reasoning_engine}`
     * </pre>
     *
     * <code>
     * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The bytes for name to set.
     * @return This builder for chaining.
     */
    public Builder setNameBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      name_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    private com.google.protobuf.Struct input_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.Struct,
            com.google.protobuf.Struct.Builder,
            com.google.protobuf.StructOrBuilder>
        inputBuilder_;
    /**
     *
     *
     * <pre>
     * Optional. Input content provided by users in JSON object format. Examples
     * include text query, function calling parameters, media bytes, etc.
     * </pre>
     *
     * <code>.google.protobuf.Struct input = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return Whether the input field is set.
     */
    public boolean hasInput() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     *
     *
     * <pre>
     * Optional. Input content provided by users in JSON object format. Examples
     * include text query, function calling parameters, media bytes, etc.
     * </pre>
     *
     * <code>.google.protobuf.Struct input = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The input.
     */
    public com.google.protobuf.Struct getInput() {
      if (inputBuilder_ == null) {
        return input_ == null ? com.google.protobuf.Struct.getDefaultInstance() : input_;
      } else {
        return inputBuilder_.getMessage();
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. Input content provided by users in JSON object format. Examples
     * include text query, function calling parameters, media bytes, etc.
     * </pre>
     *
     * <code>.google.protobuf.Struct input = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     */
    public Builder setInput(com.google.protobuf.Struct value) {
      if (inputBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        input_ = value;
      } else {
        inputBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. Input content provided by users in JSON object format. Examples
     * include text query, function calling parameters, media bytes, etc.
     * </pre>
     *
     * <code>.google.protobuf.Struct input = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     */
    public Builder setInput(com.google.protobuf.Struct.Builder builderForValue) {
      if (inputBuilder_ == null) {
        input_ = builderForValue.build();
      } else {
        inputBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. Input content provided by users in JSON object format. Examples
     * include text query, function calling parameters, media bytes, etc.
     * </pre>
     *
     * <code>.google.protobuf.Struct input = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     */
    public Builder mergeInput(com.google.protobuf.Struct value) {
      if (inputBuilder_ == null) {
        if (((bitField0_ & 0x00000002) != 0)
            && input_ != null
            && input_ != com.google.protobuf.Struct.getDefaultInstance()) {
          getInputBuilder().mergeFrom(value);
        } else {
          input_ = value;
        }
      } else {
        inputBuilder_.mergeFrom(value);
      }
      if (input_ != null) {
        bitField0_ |= 0x00000002;
        onChanged();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. Input content provided by users in JSON object format. Examples
     * include text query, function calling parameters, media bytes, etc.
     * </pre>
     *
     * <code>.google.protobuf.Struct input = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     */
    public Builder clearInput() {
      bitField0_ = (bitField0_ & ~0x00000002);
      input_ = null;
      if (inputBuilder_ != null) {
        inputBuilder_.dispose();
        inputBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. Input content provided by users in JSON object format. Examples
     * include text query, function calling parameters, media bytes, etc.
     * </pre>
     *
     * <code>.google.protobuf.Struct input = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     */
    public com.google.protobuf.Struct.Builder getInputBuilder() {
      bitField0_ |= 0x00000002;
      onChanged();
      return getInputFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * Optional. Input content provided by users in JSON object format. Examples
     * include text query, function calling parameters, media bytes, etc.
     * </pre>
     *
     * <code>.google.protobuf.Struct input = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     */
    public com.google.protobuf.StructOrBuilder getInputOrBuilder() {
      if (inputBuilder_ != null) {
        return inputBuilder_.getMessageOrBuilder();
      } else {
        return input_ == null ? com.google.protobuf.Struct.getDefaultInstance() : input_;
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. Input content provided by users in JSON object format. Examples
     * include text query, function calling parameters, media bytes, etc.
     * </pre>
     *
     * <code>.google.protobuf.Struct input = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.Struct,
            com.google.protobuf.Struct.Builder,
            com.google.protobuf.StructOrBuilder>
        getInputFieldBuilder() {
      if (inputBuilder_ == null) {
        inputBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.protobuf.Struct,
                com.google.protobuf.Struct.Builder,
                com.google.protobuf.StructOrBuilder>(getInput(), getParentForChildren(), isClean());
        input_ = null;
      }
      return inputBuilder_;
    }
    private java.lang.Object classMethod_ = "";
    /**
     *
     *
     * <pre>
     * Optional. Class method to be used for the stream query.
     * It is optional and defaults to "stream_query" if unspecified.
     * </pre>
     *
     * <code>string class_method = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The classMethod.
     */
    public java.lang.String getClassMethod() {
      java.lang.Object ref = classMethod_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        classMethod_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. Class method to be used for the stream query.
     * It is optional and defaults to "stream_query" if unspecified.
     * </pre>
     *
     * <code>string class_method = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The bytes for classMethod.
     */
    public com.google.protobuf.ByteString getClassMethodBytes() {
      java.lang.Object ref = classMethod_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        classMethod_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. Class method to be used for the stream query.
     * It is optional and defaults to "stream_query" if unspecified.
     * </pre>
     *
     * <code>string class_method = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The classMethod to set.
     * @return This builder for chaining.
     */
    public Builder setClassMethod(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      classMethod_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. Class method to be used for the stream query.
     * It is optional and defaults to "stream_query" if unspecified.
     * </pre>
     *
     * <code>string class_method = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearClassMethod() {
      classMethod_ = getDefaultInstance().getClassMethod();
      bitField0_ = (bitField0_ & ~0x00000004);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. Class method to be used for the stream query.
     * It is optional and defaults to "stream_query" if unspecified.
     * </pre>
     *
     * <code>string class_method = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The bytes for classMethod to set.
     * @return This builder for chaining.
     */
    public Builder setClassMethodBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      classMethod_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1.StreamQueryReasoningEngineRequest)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1.StreamQueryReasoningEngineRequest)
  // Singleton default instance: an empty request with all fields at their default values.
  private static final com.google.cloud.aiplatform.v1.StreamQueryReasoningEngineRequest
      DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1.StreamQueryReasoningEngineRequest();
  }
  public static com.google.cloud.aiplatform.v1.StreamQueryReasoningEngineRequest
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Parser used by all parseFrom/parseDelimitedFrom entry points above.
  private static final com.google.protobuf.Parser<StreamQueryReasoningEngineRequest> PARSER =
      new com.google.protobuf.AbstractParser<StreamQueryReasoningEngineRequest>() {
        @java.lang.Override
        public StreamQueryReasoningEngineRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<StreamQueryReasoningEngineRequest> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<StreamQueryReasoningEngineRequest> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.aiplatform.v1.StreamQueryReasoningEngineRequest
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/incubator-brooklyn | 37,824 | brooklyn-server/software/base/src/test/java/org/apache/brooklyn/entity/brooklynnode/BrooklynNodeIntegrationTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.brooklyn.entity.brooklynnode;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertTrue;
import static org.testng.Assert.fail;
import java.io.File;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
import java.net.URI;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutionException;
import org.apache.brooklyn.api.effector.Effector;
import org.apache.brooklyn.api.entity.Entity;
import org.apache.brooklyn.api.entity.EntityLocal;
import org.apache.brooklyn.api.entity.EntitySpec;
import org.apache.brooklyn.core.entity.Entities;
import org.apache.brooklyn.core.entity.lifecycle.Lifecycle;
import org.apache.brooklyn.core.internal.BrooklynProperties;
import org.apache.brooklyn.core.location.Locations;
import org.apache.brooklyn.core.location.PortRanges;
import org.apache.brooklyn.core.objs.proxy.EntityProxyImpl;
import org.apache.brooklyn.core.test.BrooklynAppUnitTestSupport;
import org.apache.brooklyn.entity.brooklynnode.BrooklynNode.DeployBlueprintEffector;
import org.apache.brooklyn.entity.brooklynnode.BrooklynNode.ExistingFileBehaviour;
import org.apache.brooklyn.entity.brooklynnode.BrooklynNode.StopNodeAndKillAppsEffector;
import org.apache.brooklyn.entity.software.base.SoftwareProcess.StopSoftwareParameters.StopMode;
import org.apache.brooklyn.entity.stock.BasicApplication;
import org.apache.brooklyn.entity.stock.BasicApplicationImpl;
import org.apache.brooklyn.feed.http.JsonFunctions;
import org.apache.brooklyn.test.Asserts;
import org.apache.brooklyn.test.EntityTestUtils;
import org.apache.brooklyn.test.HttpTestUtils;
import org.apache.brooklyn.util.collections.MutableMap;
import org.apache.brooklyn.util.core.config.ConfigBag;
import org.apache.brooklyn.util.http.HttpTool;
import org.apache.brooklyn.util.http.HttpToolResponse;
import org.apache.brooklyn.util.exceptions.Exceptions;
import org.apache.brooklyn.util.guava.Functionals;
import org.apache.brooklyn.util.javalang.JavaClassNames;
import org.apache.brooklyn.util.net.Networking;
import org.apache.brooklyn.util.net.Urls;
import org.apache.brooklyn.util.os.Os;
import org.apache.brooklyn.util.text.Strings;
import org.apache.brooklyn.util.time.Duration;
import org.apache.http.auth.UsernamePasswordCredentials;
import org.apache.http.client.HttpClient;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.testng.Assert;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import org.apache.brooklyn.location.localhost.LocalhostMachineProvisioningLocation;
import org.apache.brooklyn.location.ssh.SshMachineLocation;
import com.google.common.base.Charsets;
import com.google.common.base.Function;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import com.google.common.io.Files;
/**
* This test needs to able to access the binary artifact in order to run.
* The default behaviour is to take this from maven, which works pretty well if you're downloading from hosted maven.
* <p>
* This class has been updated so that it does not effect or depend on the contents of ~/.brooklyn/brooklyn.properties .
* <p>
* If you wish to supply your own version (useful if testing changes locally!), you'll need to force download of this file.
* The simplest way is to install:
* <ul>
* <li>file://$HOME/.brooklyn/repository/BrooklynNode/${VERSION}/BrooklynNode-${VERSION}.tar.gz - for snapshot versions (filename is default format due to lack of filename in sonatype inferencing;
* note on case-sensitive systems it might have to be all in lower case!)
* <li>file://$HOME/.brooklyn/repository/BrooklynNode/${VERSION}/brooklyn-${VERSION}-dist.tar.gz - for release versions, filename should match that in maven central
* </ul>
* In both cases, remember that you may also need to wipe the local apps cache ($BROOKLYN_DATA_DIR/installs/BrooklynNode).
* The following commands may be useful:
* <p>
* <code>
* cp ~/.m2/repository/org/apache/brooklyn/brooklyn-dist/0.7.0-SNAPSHOT/brooklyn-dist-0.7.0-SNAPSHOT-dist.tar.gz ~/.brooklyn/repository/BrooklynNode/0.7.0-SNAPSHOT/BrooklynNode-0.7.0-SNAPSHOT.tar.gz
* rm -rf /tmp/brooklyn-`whoami`/installs/BrooklynNode*
* </code>
*/
public class BrooklynNodeIntegrationTest extends BrooklynAppUnitTestSupport {
    private static final Logger log = LoggerFactory.getLogger(BrooklynNodeIntegrationTest.class);

    // Per-test scratch state: created in setUp(), cleaned in tearDown().
    private File pseudoBrooklynPropertiesFile;  // stands in for the node's remote brooklyn.properties path
    private File pseudoBrooklynCatalogFile;     // stands in for the node's remote catalog path
    private File persistenceDir;                // only assigned by tests exercising --persist
    private LocalhostMachineProvisioningLocation loc;
    private List<LocalhostMachineProvisioningLocation> locs;
    /** Allocates unique temp-file paths (files are deleted so only the path is reserved) and a localhost location. */
    @BeforeMethod(alwaysRun=true)
    @Override
    public void setUp() throws Exception {
        super.setUp();
        pseudoBrooklynPropertiesFile = Os.newTempFile("brooklynnode-test", ".properties");
        pseudoBrooklynPropertiesFile.delete();  // keep only the unique name; the entity under test writes the file
        pseudoBrooklynCatalogFile = Os.newTempFile("brooklynnode-test", ".catalog");
        pseudoBrooklynCatalogFile.delete();
        loc = app.newLocalhostProvisioningLocation();
        locs = ImmutableList.of(loc);
    }
    /** Tears down the app/management context first, then removes any scratch files the test produced. */
    @AfterMethod(alwaysRun=true)
    @Override
    public void tearDown() throws Exception {
        try {
            super.tearDown();
        } finally {
            // always attempt cleanup, even if super.tearDown() threw
            if (pseudoBrooklynPropertiesFile != null) pseudoBrooklynPropertiesFile.delete();
            if (pseudoBrooklynCatalogFile != null) pseudoBrooklynCatalogFile.delete();
            if (persistenceDir != null) Os.deleteRecursively(persistenceDir);
        }
    }
    /**
     * Base spec shared by most tests: binds the web console to any NIC and ignores any
     * pre-existing properties file on the target, so tests are independent of the local
     * ~/.brooklyn configuration.
     */
    protected EntitySpec<BrooklynNode> newBrooklynNodeSpecForTest() {
        // poor man's way to output which test is running
        log.info("Creating entity spec for "+JavaClassNames.callerNiceClassAndMethod(1));
        return EntitySpec.create(BrooklynNode.class)
                .configure(BrooklynNode.WEB_CONSOLE_BIND_ADDRESS, Networking.ANY_NIC)
                .configure(BrooklynNode.ON_EXISTING_PROPERTIES_FILE, ExistingFileBehaviour.DO_NOT_USE);
        /* yaml equivalent, for testing:

location: localhost
services:
- type: org.apache.brooklyn.entity.brooklynnode.BrooklynNode
  bindAddress: 127.0.0.1
  onExistingProperties: do_not_use

# some other options

  enabledHttpProtocols: [ https ]
  managementPassword: s3cr3t
  brooklynLocalPropertiesContents: |
    brooklyn.webconsole.security.https.required=true
    brooklyn.webconsole.security.users=admin
    brooklyn.webconsole.security.user.admin.password=s3cr3t
    brooklyn.location.localhost.enabled=false

         */
    }
@Test(groups="Integration")
public void testCanStartAndStop() throws Exception {
BrooklynNode brooklynNode = app.createAndManageChild(newBrooklynNodeSpecForTest());
app.start(locs);
log.info("started "+app+" containing "+brooklynNode+" for "+JavaClassNames.niceClassAndMethod());
EntityTestUtils.assertAttributeEqualsEventually(brooklynNode, BrooklynNode.SERVICE_UP, true);
brooklynNode.stop();
EntityTestUtils.assertAttributeEquals(brooklynNode, BrooklynNode.SERVICE_UP, false);
}
@Test(groups="Integration")
public void testSetsGlobalBrooklynPropertiesFromContents() throws Exception {
BrooklynNode brooklynNode = app.createAndManageChild(newBrooklynNodeSpecForTest()
.configure(BrooklynNode.BROOKLYN_GLOBAL_PROPERTIES_REMOTE_PATH, pseudoBrooklynPropertiesFile.getAbsolutePath())
.configure(BrooklynNode.BROOKLYN_GLOBAL_PROPERTIES_CONTENTS, "abc=def"));
app.start(locs);
log.info("started "+app+" containing "+brooklynNode+" for "+JavaClassNames.niceClassAndMethod());
assertEquals(Files.readLines(pseudoBrooklynPropertiesFile, Charsets.UTF_8), ImmutableList.of("abc=def"));
}
@Test(groups="Integration")
public void testSetsLocalBrooklynPropertiesFromContents() throws Exception {
BrooklynNode brooklynNode = app.createAndManageChild(newBrooklynNodeSpecForTest()
.configure(BrooklynNode.BROOKLYN_LOCAL_PROPERTIES_REMOTE_PATH, pseudoBrooklynPropertiesFile.getAbsolutePath())
.configure(BrooklynNode.BROOKLYN_LOCAL_PROPERTIES_CONTENTS, "abc=def"));
app.start(locs);
log.info("started "+app+" containing "+brooklynNode+" for "+JavaClassNames.niceClassAndMethod());
assertEquals(Files.readLines(pseudoBrooklynPropertiesFile, Charsets.UTF_8), ImmutableList.of("abc=def"));
}
@Test(groups="Integration")
public void testSetsBrooklynPropertiesFromUri() throws Exception {
File brooklynPropertiesSourceFile = File.createTempFile("brooklynnode-test", ".properties");
Files.write("abc=def", brooklynPropertiesSourceFile, Charsets.UTF_8);
BrooklynNode brooklynNode = app.createAndManageChild(newBrooklynNodeSpecForTest()
.configure(BrooklynNode.BROOKLYN_GLOBAL_PROPERTIES_REMOTE_PATH, pseudoBrooklynPropertiesFile.getAbsolutePath())
.configure(BrooklynNode.BROOKLYN_GLOBAL_PROPERTIES_URI, brooklynPropertiesSourceFile.toURI().toString()));
app.start(locs);
log.info("started "+app+" containing "+brooklynNode+" for "+JavaClassNames.niceClassAndMethod());
assertEquals(Files.readLines(pseudoBrooklynPropertiesFile, Charsets.UTF_8), ImmutableList.of("abc=def"));
}
@Test(groups="Integration")
public void testSetsBrooklynCatalogFromContents() throws Exception {
BrooklynNode brooklynNode = app.createAndManageChild(newBrooklynNodeSpecForTest()
.configure(BrooklynNode.BROOKLYN_CATALOG_REMOTE_PATH, pseudoBrooklynCatalogFile.getAbsolutePath())
.configure(BrooklynNode.BROOKLYN_CATALOG_CONTENTS, "<catalog/>"));
app.start(locs);
log.info("started "+app+" containing "+brooklynNode+" for "+JavaClassNames.niceClassAndMethod());
assertEquals(Files.readLines(pseudoBrooklynCatalogFile, Charsets.UTF_8), ImmutableList.of("<catalog/>"));
}
@Test(groups="Integration")
public void testSetsBrooklynCatalogFromUri() throws Exception {
File brooklynCatalogSourceFile = File.createTempFile("brooklynnode-test", ".catalog");
Files.write("abc=def", brooklynCatalogSourceFile, Charsets.UTF_8);
BrooklynNode brooklynNode = app.createAndManageChild(newBrooklynNodeSpecForTest()
.configure(BrooklynNode.BROOKLYN_CATALOG_REMOTE_PATH, pseudoBrooklynCatalogFile.getAbsolutePath())
.configure(BrooklynNode.BROOKLYN_CATALOG_URI, brooklynCatalogSourceFile.toURI().toString()));
app.start(locs);
log.info("started "+app+" containing "+brooklynNode+" for "+JavaClassNames.niceClassAndMethod());
assertEquals(Files.readLines(pseudoBrooklynCatalogFile, Charsets.UTF_8), ImmutableList.of("abc=def"));
}
@Test(groups="Integration")
public void testCopiesResources() throws Exception {
File sourceFile = File.createTempFile("brooklynnode-test", ".properties");
Files.write("abc=def", sourceFile, Charsets.UTF_8);
File tempDir = Files.createTempDir();
File expectedFile = new File(tempDir, "myfile.txt");
try {
BrooklynNode brooklynNode = app.createAndManageChild(newBrooklynNodeSpecForTest()
.configure(BrooklynNode.RUN_DIR, tempDir.getAbsolutePath())
.configure(BrooklynNode.COPY_TO_RUNDIR, ImmutableMap.of(sourceFile.getAbsolutePath(), "${RUN}/myfile.txt")));
app.start(locs);
log.info("started "+app+" containing "+brooklynNode+" for "+JavaClassNames.niceClassAndMethod());
assertEquals(Files.readLines(expectedFile, Charsets.UTF_8), ImmutableList.of("abc=def"));
} finally {
expectedFile.delete();
tempDir.delete();
sourceFile.delete();
}
}
@Test(groups="Integration")
public void testCopiesClasspathEntriesInConfigKey() throws Exception {
String content = "abc=def";
File classpathEntry1 = File.createTempFile("first", ".properties");
File classpathEntry2 = File.createTempFile("second", ".properties");
Files.write(content, classpathEntry1, Charsets.UTF_8);
Files.write(content, classpathEntry2, Charsets.UTF_8);
File tempDir = Files.createTempDir();
File destDir = new File(new File(tempDir, "lib"), "dropins");
File expectedFile1 = new File(destDir, classpathEntry1.getName());
File expectedFile2 = new File(destDir, classpathEntry2.getName());
try {
BrooklynNode brooklynNode = app.createAndManageChild(newBrooklynNodeSpecForTest()
.configure(BrooklynNode.RUN_DIR, tempDir.getAbsolutePath())
.configure(BrooklynNode.CLASSPATH, ImmutableList.of(classpathEntry1.getAbsolutePath(), classpathEntry2.getAbsolutePath()))
);
app.start(locs);
log.info("started "+app+" containing "+brooklynNode+" for "+JavaClassNames.niceClassAndMethod());
assertEquals(Files.readLines(expectedFile1, Charsets.UTF_8), ImmutableList.of(content));
assertEquals(Files.readLines(expectedFile2, Charsets.UTF_8), ImmutableList.of(content));
} finally {
expectedFile1.delete();
expectedFile2.delete();
tempDir.delete();
classpathEntry1.delete();
classpathEntry2.delete();
}
}
@Test(groups="Integration")
public void testCopiesClasspathEntriesInConfigKey2() throws Exception {
String content = "abc=def";
File classpathEntry1 = File.createTempFile("first", ".properties");
File classpathEntry2 = File.createTempFile("second", ".properties");
Files.write(content, classpathEntry1, Charsets.UTF_8);
Files.write(content, classpathEntry2, Charsets.UTF_8);
File tempDir = Files.createTempDir();
String testName1 = "test_" + classpathEntry1.getName();
File destDir = new File(new File(tempDir, "lib"), "dropins");
File expectedFile1 = new File(destDir, testName1);
String testName2 = "test_" + classpathEntry2.getName();
File expectedFile2 = new File(destDir, testName2);
Map entry1 = ImmutableMap.of("url", classpathEntry1.getAbsolutePath(), "filename", testName1);
Map entry2 = ImmutableMap.of("url", classpathEntry2.getAbsolutePath(), "filename", testName2);
try {
BrooklynNode brooklynNode = app.createAndManageChild(newBrooklynNodeSpecForTest()
.configure(BrooklynNode.RUN_DIR, tempDir.getAbsolutePath())
.configure(BrooklynNode.CLASSPATH, ImmutableList.of(entry1, entry2))
);
app.start(locs);
log.info("started "+app+" containing "+brooklynNode+" for "+JavaClassNames.niceClassAndMethod());
assertEquals(Files.readLines(expectedFile1, Charsets.UTF_8), ImmutableList.of(content));
assertEquals(Files.readLines(expectedFile2, Charsets.UTF_8), ImmutableList.of(content));
} finally {
expectedFile1.delete();
expectedFile2.delete();
tempDir.delete();
classpathEntry1.delete();
classpathEntry2.delete();
}
}
/*
Exception java.io.FileNotFoundException
Message: /tmp/1445824492556-0/lib/first4759470075693094333.properties (No such file or directory)
Stacktrace:
at java.io.FileInputStream.open(Native Method)
at java.io.FileInputStream.<init>(FileInputStream.java:146)
at com.google.common.io.Files$FileByteSource.openStream(Files.java:126)
at com.google.common.io.Files$FileByteSource.openStream(Files.java:116)
at com.google.common.io.ByteSource$AsCharSource.openStream(ByteSource.java:435)
at com.google.common.io.CharSource.getInput(CharSource.java:94)
at com.google.common.io.CharSource.getInput(CharSource.java:65)
at com.google.common.io.CharStreams.readLines(CharStreams.java:344)
at com.google.common.io.Files.readLines(Files.java:741)
at com.google.common.io.Files.readLines(Files.java:712)
at org.apache.brooklyn.entity.brooklynnode.BrooklynNodeIntegrationTest.testCopiesClasspathEntriesInBrooklynProperties(BrooklynNodeIntegrationTest.java:358)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:606)
at org.testng.internal.MethodInvocationHelper.invokeMethod(MethodInvocationHelper.java:84)
at org.testng.internal.Invoker.invokeMethod(Invoker.java:714)
at org.testng.internal.Invoker.invokeTestMethod(Invoker.java:901)
at org.testng.internal.Invoker.invokeTestMethods(Invoker.java:1231)
at org.testng.internal.TestMethodWorker.invokeTestMethods(TestMethodWorker.java:127)
at org.testng.internal.TestMethodWorker.run(TestMethodWorker.java:111)
at org.testng.TestRunner.privateRun(TestRunner.java:767)
at org.testng.TestRunner.run(TestRunner.java:617)
at org.testng.SuiteRunner.runTest(SuiteRunner.java:348)
at org.testng.SuiteRunner.runSequentially(SuiteRunner.java:343)
at org.testng.SuiteRunner.privateRun(SuiteRunner.java:305)
at org.testng.SuiteRunner.run(SuiteRunner.java:254)
at org.testng.SuiteRunnerWorker.runSuite(SuiteRunnerWorker.java:52)
at org.testng.SuiteRunnerWorker.run(SuiteRunnerWorker.java:86)
at org.testng.TestNG.runSuitesSequentially(TestNG.java:1224)
at org.testng.TestNG.runSuitesLocally(TestNG.java:1149)
at org.testng.TestNG.run(TestNG.java:1057)
at org.apache.maven.surefire.testng.TestNGExecutor.run(TestNGExecutor.java:115)
at org.apache.maven.surefire.testng.TestNGDirectoryTestSuite.executeMulti(TestNGDirectoryTestSuite.java:205)
at org.apache.maven.surefire.testng.TestNGDirectoryTestSuite.execute(TestNGDirectoryTestSuite.java:108)
at org.apache.maven.surefire.testng.TestNGProvider.invoke(TestNGProvider.java:111)
at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:203)
at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:155)
at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:103)
*/
@Test(groups={"Integration","Broken"})
public void testCopiesClasspathEntriesInBrooklynProperties() throws Exception {
String content = "abc=def";
File classpathEntry1 = File.createTempFile("first", ".properties");
File classpathEntry2 = File.createTempFile("second", ".properties");
Files.write(content, classpathEntry1, Charsets.UTF_8);
Files.write(content, classpathEntry2, Charsets.UTF_8);
File tempDir = Files.createTempDir();
File expectedFile1 = new File(new File(tempDir, "lib"), classpathEntry1.getName());
File expectedFile2 = new File(new File(tempDir, "lib"), classpathEntry2.getName());
try {
String propName = BrooklynNode.CLASSPATH.getName();
String propValue = classpathEntry1.toURI().toString() + "," + classpathEntry2.toURI().toString();
((BrooklynProperties)app.getManagementContext().getConfig()).put(propName, propValue);
BrooklynNode brooklynNode = app.createAndManageChild(newBrooklynNodeSpecForTest()
.configure(BrooklynNode.RUN_DIR, tempDir.getAbsolutePath())
);
app.start(locs);
log.info("started "+app+" containing "+brooklynNode+" for "+JavaClassNames.niceClassAndMethod());
assertEquals(Files.readLines(expectedFile1, Charsets.UTF_8), ImmutableList.of(content));
assertEquals(Files.readLines(expectedFile2, Charsets.UTF_8), ImmutableList.of(content));
} finally {
expectedFile1.delete();
expectedFile2.delete();
tempDir.delete();
classpathEntry1.delete();
classpathEntry2.delete();
}
}
// TODO test that the classpath set above is actually used
@Test(groups="Integration")
public void testSetsBrooklynWebConsolePort() throws Exception {
BrooklynNode brooklynNode = app.createAndManageChild(newBrooklynNodeSpecForTest()
.configure(BrooklynNode.HTTP_PORT, PortRanges.fromString("45000+")));
app.start(locs);
log.info("started "+app+" containing "+brooklynNode+" for "+JavaClassNames.niceClassAndMethod());
Integer httpPort = brooklynNode.getAttribute(BrooklynNode.HTTP_PORT);
URI webConsoleUri = brooklynNode.getAttribute(BrooklynNode.WEB_CONSOLE_URI);
assertTrue(httpPort >= 45000 && httpPort < 54100, "httpPort="+httpPort);
assertEquals((Integer)webConsoleUri.getPort(), httpPort);
HttpTestUtils.assertHttpStatusCodeEquals(webConsoleUri.toString(), 200, 401);
}
@Test(groups="Integration")
public void testStartsAppOnStartup() throws Exception {
BrooklynNode brooklynNode = app.createAndManageChild(newBrooklynNodeSpecForTest()
.configure(BrooklynNode.APP, BasicApplicationImpl.class.getName()));
app.start(locs);
log.info("started "+app+" containing "+brooklynNode+" for "+JavaClassNames.niceClassAndMethod());
URI webConsoleUri = brooklynNode.getAttribute(BrooklynNode.WEB_CONSOLE_URI);
waitForApps(webConsoleUri, 1);
String apps = HttpTestUtils.getContent(webConsoleUri.toString()+"/v1/applications");
List<String> appType = parseJsonList(apps, ImmutableList.of("spec", "type"), String.class);
assertEquals(appType, ImmutableList.of(BasicApplication.class.getName()));
}
    /** Blocks until the /v1/applications endpoint answers 200 (403 tolerated initially while the server warms up). */
    protected static void waitForApps(String webConsoleUri) {
        HttpTestUtils.assertHttpStatusCodeEquals(webConsoleUri+"/v1/applications", 200, 403);
        HttpTestUtils.assertHttpStatusCodeEventuallyEquals(webConsoleUri+"/v1/applications", 200);
    }
    // TODO Should introduce startup stages and let the client select which stage it expects to be complete
    /** Blocks until the remote node reports exactly {@code num} managed applications, all in RUNNING state. */
    protected void waitForApps(final URI webConsoleUri, final int num) {
        waitForApps(webConsoleUri.toString());

        // e.g. [{"id":"UnBqPcqg","spec":{"name":"Application (UnBqPcqg)","type":"org.apache.brooklyn.entity.stock.BasicApplication","locations":["pOL4NtiW"]},"status":"RUNNING","links":{"self":"/v1/applications/UnBqPcqg","entities":"/v1/applications/UnBqPcqg/entities"}}]
        Asserts.succeedsEventually(new Runnable() {
            @Override
            public void run() {
                //Wait all apps to become managed
                String appsContent = HttpTestUtils.getContent(webConsoleUri.toString()+"/v1/applications");
                List<String> appIds = parseJsonList(appsContent, ImmutableList.of("id"), String.class);
                assertEquals(appIds.size(), num);

                // and then to start
                List<String> statuses = parseJsonList(appsContent, ImmutableList.of("status"), String.class);
                for (String status : statuses) {
                    assertEquals(status, Lifecycle.RUNNING.toString().toUpperCase());
                }
            }});
    }
@Test(groups="Integration")
public void testStartsAppViaEffector() throws Exception {
BrooklynNode brooklynNode = app.createAndManageChild(newBrooklynNodeSpecForTest());
app.start(locs);
log.info("started "+app+" containing "+brooklynNode+" for "+JavaClassNames.niceClassAndMethod());
// note there is also a test for this in DeployApplication
final URI webConsoleUri = brooklynNode.getAttribute(BrooklynNode.WEB_CONSOLE_URI);
waitForApps(webConsoleUri.toString());
final String id = brooklynNode.invoke(BrooklynNode.DEPLOY_BLUEPRINT, ConfigBag.newInstance()
.configure(DeployBlueprintEffector.BLUEPRINT_TYPE, BasicApplication.class.getName())
.getAllConfig()).get();
String apps = HttpTestUtils.getContent(webConsoleUri.toString()+"/v1/applications");
List<String> appType = parseJsonList(apps, ImmutableList.of("spec", "type"), String.class);
assertEquals(appType, ImmutableList.of(BasicApplication.class.getName()));
HttpTestUtils.assertContentEventuallyMatches(
webConsoleUri.toString()+"/v1/applications/"+id+"/entities/"+id+"/sensors/service.state",
"\"?(running|RUNNING)\"?");
}
@Test(groups="Integration")
public void testUsesLocation() throws Exception {
String brooklynPropertiesContents =
"brooklyn.location.named.mynamedloc=localhost:(name=myname)\n"+
//force lat+long so test will work when offline
"brooklyn.location.named.mynamedloc.latitude=123\n"+
"brooklyn.location.named.mynamedloc.longitude=45.6\n";
BrooklynNode brooklynNode = app.createAndManageChild(newBrooklynNodeSpecForTest()
.configure(BrooklynNode.BROOKLYN_LOCAL_PROPERTIES_CONTENTS, brooklynPropertiesContents)
.configure(BrooklynNode.APP, BasicApplicationImpl.class.getName())
.configure(BrooklynNode.LOCATIONS, "named:mynamedloc"));
app.start(locs);
log.info("started "+app+" containing "+brooklynNode+" for "+JavaClassNames.niceClassAndMethod());
URI webConsoleUri = brooklynNode.getAttribute(BrooklynNode.WEB_CONSOLE_URI);
waitForApps(webConsoleUri, 1);
// Check that "mynamedloc" has been picked up from the brooklyn.properties
String locsContent = HttpTestUtils.getContent(webConsoleUri.toString()+"/v1/locations");
List<String> locNames = parseJsonList(locsContent, ImmutableList.of("name"), String.class);
assertTrue(locNames.contains("mynamedloc"), "locNames="+locNames);
// Find the id of the concrete location instance of the app
String appsContent = HttpTestUtils.getContent(webConsoleUri.toString()+"/v1/applications");
List<String[]> appLocationIds = parseJsonList(appsContent, ImmutableList.of("spec", "locations"), String[].class);
String appLocationId = Iterables.getOnlyElement(appLocationIds)[0]; // app.getManagementContext().getLocationRegistry()
// Check that the concrete location is of the required type
String locatedLocationsContent = HttpTestUtils.getContent(webConsoleUri.toString()+"/v1/locations/usage/LocatedLocations");
assertEquals(parseJson(locatedLocationsContent, ImmutableList.of(appLocationId, "name"), String.class), "myname");
assertEquals(parseJson(locatedLocationsContent, ImmutableList.of(appLocationId, "longitude"), Double.class), 45.6, 0.00001);
}
@Test(groups="Integration")
public void testAuthenticationAndHttps() throws Exception {
String adminPassword = "p4ssw0rd";
BrooklynNode brooklynNode = app.createAndManageChild(newBrooklynNodeSpecForTest()
.configure(BrooklynNode.ENABLED_HTTP_PROTOCOLS, ImmutableList.of("https"))
.configure(BrooklynNode.MANAGEMENT_PASSWORD, adminPassword)
.configure(BrooklynNode.BROOKLYN_LOCAL_PROPERTIES_CONTENTS,
Strings.lines(
"brooklyn.webconsole.security.https.required=true",
"brooklyn.webconsole.security.users=admin",
"brooklyn.webconsole.security.user.admin.password="+adminPassword,
"brooklyn.location.localhost.enabled=false") )
);
app.start(locs);
log.info("started "+app+" containing "+brooklynNode+" for "+JavaClassNames.niceClassAndMethod());
URI webConsoleUri = brooklynNode.getAttribute(BrooklynNode.WEB_CONSOLE_URI);
Assert.assertTrue(webConsoleUri.toString().startsWith("https://"), "web console not https: "+webConsoleUri);
Integer httpsPort = brooklynNode.getAttribute(BrooklynNode.HTTPS_PORT);
Assert.assertTrue(httpsPort!=null && httpsPort >= 8443 && httpsPort <= 8500);
Assert.assertTrue(webConsoleUri.toString().contains(""+httpsPort), "web console not using right https port ("+httpsPort+"): "+webConsoleUri);
HttpTestUtils.assertHttpStatusCodeEquals(webConsoleUri.toString(), 401);
HttpClient http = HttpTool.httpClientBuilder()
.trustAll()
.uri(webConsoleUri)
.laxRedirect(true)
.credentials(new UsernamePasswordCredentials("admin", adminPassword))
.build();
HttpToolResponse response = HttpTool.httpGet(http, webConsoleUri, MutableMap.<String,String>of());
Assert.assertEquals(response.getResponseCode(), 200);
}
@Test(groups="Integration")
public void testStopPlainThrowsException() throws Exception {
BrooklynNode brooklynNode = setUpBrooklynNodeWithApp();
// Not using annotation with `expectedExceptions = PropagatedRuntimeException.class` because want to
// ensure exception comes from stop. On jenkins, was seeing setUpBrooklynNodeWithApp fail in
// testStopAndKillAppsEffector; so can't tell if this method was really passing!
try {
brooklynNode.stop();
fail("Expected "+brooklynNode+" stop to fail, because has app");
} catch (Exception e) {
IllegalStateException ise = Exceptions.getFirstThrowableOfType(e, IllegalStateException.class);
if (ise != null && ise.toString().contains("Can't stop instance with running applications")) {
// success
} else {
throw e;
}
} finally {
try {
brooklynNode.invoke(BrooklynNode.STOP_NODE_AND_KILL_APPS, ImmutableMap.of(StopNodeAndKillAppsEffector.TIMEOUT.getName(), Duration.THIRTY_SECONDS)).getUnchecked();
} catch (Exception e) {
log.warn("Error in stopNodeAndKillApps for "+brooklynNode+" (continuing)", e);
}
}
}
@Test(groups="Integration")
public void testStopAndKillAppsEffector() throws Exception {
createNodeAndExecStopEffector(BrooklynNode.STOP_NODE_AND_KILL_APPS);
}
@Test(groups="Integration")
public void testStopButLeaveAppsEffector() throws Exception {
createNodeAndExecStopEffector(BrooklynNode.STOP_NODE_BUT_LEAVE_APPS);
}
@Test(groups="Integration")
public void testStopAndRestartProcess() throws Exception {
persistenceDir = Files.createTempDir();
BrooklynNode brooklynNode = app.createAndManageChild(newBrooklynNodeSpecForTest()
.configure(BrooklynNode.EXTRA_LAUNCH_PARAMETERS, "--persist auto --persistenceDir "+persistenceDir.getAbsolutePath())
.configure(BrooklynNode.APP, BasicApplicationImpl.class.getName()));
app.start(locs);
log.info("started "+app+" containing "+brooklynNode+" for "+JavaClassNames.niceClassAndMethod());
File pidFile = new File(getDriver(brooklynNode).getPidFile());
URI webConsoleUri = brooklynNode.getAttribute(BrooklynNode.WEB_CONSOLE_URI);
waitForApps(webConsoleUri, 1);
// Stop just the process; will not have unmanaged entity unless machine was being terminated
brooklynNode.invoke(BrooklynNode.STOP, ImmutableMap.<String, Object>of(
BrooklynNode.StopSoftwareParameters.STOP_MACHINE_MODE.getName(), StopMode.NEVER,
BrooklynNode.StopSoftwareParameters.STOP_PROCESS_MODE.getName(), StopMode.ALWAYS)).getUnchecked();
assertTrue(Entities.isManaged(brooklynNode));
assertFalse(isPidRunning(pidFile), "pid in "+pidFile+" still running");
// Clear the startup app so it's not started second time, in addition to the rebind state
// TODO remove this once the startup app is created only if no previous persistence state
brooklynNode.config().set(BrooklynNode.APP, (String)null);
((EntityLocal)brooklynNode).sensors().set(BrooklynNode.APP, null);
// Restart the process; expect persisted state to have been restored, so apps still known about
brooklynNode.invoke(BrooklynNode.RESTART, ImmutableMap.<String, Object>of(
BrooklynNode.RestartSoftwareParameters.RESTART_MACHINE.getName(), "false")).getUnchecked();
waitForApps(webConsoleUri.toString());
String apps = HttpTestUtils.getContent(webConsoleUri.toString()+"/v1/applications");
List<String> appType = parseJsonList(apps, ImmutableList.of("spec", "type"), String.class);
assertEquals(appType, ImmutableList.of(BasicApplication.class.getName()));
}
    /** Shared body for the stop-effector tests: start node+app, invoke the given stop effector, verify pid gone. */
    private void createNodeAndExecStopEffector(Effector<?> eff) throws Exception {
        BrooklynNode brooklynNode = setUpBrooklynNodeWithApp();
        File pidFile = new File(getDriver(brooklynNode).getPidFile());
        assertTrue(isPidRunning(pidFile));

        brooklynNode.invoke(eff, Collections.<String, Object>emptyMap()).getUnchecked();

        // Note can't use driver.isRunning to check shutdown; can't invoke scripts on an unmanaged entity
        EntityTestUtils.assertAttributeEquals(brooklynNode, BrooklynNode.SERVICE_UP, false);

        // unmanaged if the machine is destroyed - ie false on localhost (this test by default), but true in the cloud
//        assertFalse(Entities.isManaged(brooklynNode));

        assertFalse(isPidRunning(pidFile), "pid in "+pidFile+" still running");
    }
    /**
     * Checks whether the process recorded in {@code pidFile} is alive, by running
     * {@code ps -p $(cat pidFile)} over a freshly-obtained (and afterwards released) ssh machine.
     * Returns false if the pid file is missing.
     */
    private boolean isPidRunning(File pidFile) throws Exception {
        SshMachineLocation machine = loc.obtain();
        try {
            int result = machine.execScript("check-pid", ImmutableList.of(
                    "test -f "+pidFile+" || exit 1",
                    "ps -p `cat "+pidFile+"`"));
            return result == 0;
        } finally {
            loc.release(machine);
            Locations.unmanage(machine);
        }
    }
    /**
     * Extracts the entity's ssh driver by reflecting through its dynamic proxy —
     * getDriver() is on the impl class, not the BrooklynNode interface, so it cannot
     * be called directly on the proxy reference.
     */
    private BrooklynNodeSshDriver getDriver(BrooklynNode brooklynNode) {
        try {
            EntityProxyImpl entityProxy = (EntityProxyImpl)Proxy.getInvocationHandler(brooklynNode);
            Method getDriver = BrooklynNodeImpl.class.getMethod("getDriver");
            return (BrooklynNodeSshDriver)entityProxy.invoke(brooklynNode, getDriver, new Object[]{});
        } catch (Throwable e) {
            throw Exceptions.propagate(e);
        }
    }
private BrooklynNode setUpBrooklynNodeWithApp() throws InterruptedException,
ExecutionException {
BrooklynNode brooklynNode = app.createAndManageChild(EntitySpec.create(BrooklynNode.class)
.configure(BrooklynNode.NO_WEB_CONSOLE_AUTHENTICATION, Boolean.TRUE));
app.start(locs);
log.info("started "+app+" containing "+brooklynNode+" for "+JavaClassNames.niceClassAndMethod());
EntityTestUtils.assertAttributeEqualsEventually(brooklynNode, BrooklynNode.SERVICE_UP, true);
String baseUrl = brooklynNode.getAttribute(BrooklynNode.WEB_CONSOLE_URI).toString();
waitForApps(baseUrl);
final String id = brooklynNode.invoke(BrooklynNode.DEPLOY_BLUEPRINT, ConfigBag.newInstance()
.configure(DeployBlueprintEffector.BLUEPRINT_TYPE, BasicApplication.class.getName())
.getAllConfig()).get();
String entityUrl = Urls.mergePaths(baseUrl, "v1/applications/", id, "entities", id);
Entity mirror = brooklynNode.addChild(EntitySpec.create(BrooklynEntityMirror.class)
.configure(BrooklynEntityMirror.MIRRORED_ENTITY_URL, entityUrl)
.configure(BrooklynEntityMirror.MIRRORED_ENTITY_ID, id));
assertEquals(brooklynNode.getChildren().size(), 1);
return brooklynNode;
}
private <T> T parseJson(String json, List<String> elements, Class<T> clazz) {
Function<String, T> func = Functionals.chain(
JsonFunctions.asJson(),
JsonFunctions.walk(elements),
JsonFunctions.cast(clazz));
return func.apply(json);
}
private <T> List<T> parseJsonList(String json, List<String> elements, Class<T> clazz) {
Function<String, List<T>> func = Functionals.chain(
JsonFunctions.asJson(),
JsonFunctions.forEach(Functionals.chain(
JsonFunctions.walk(elements),
JsonFunctions.cast(clazz))));
return func.apply(json);
}
}
|
apache/harmony | 37,763 | classlib/modules/swing/src/main/java/common/javax/swing/text/DefaultCaret.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* @author Evgeniya G. Maenkova
*/
package javax.swing.text;
import java.awt.Color;
import java.awt.Component;
import java.awt.EventQueue;
import java.awt.Graphics;
import java.awt.Point;
import java.awt.Rectangle;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.FocusEvent;
import java.awt.event.FocusListener;
import java.awt.event.MouseEvent;
import java.awt.event.MouseListener;
import java.awt.event.MouseMotionListener;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.util.EventListener;
import javax.swing.AbstractAction;
import javax.swing.SwingConstants;
import javax.swing.SwingUtilities;
import javax.swing.Timer;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;
import javax.swing.event.DocumentEvent;
import javax.swing.event.DocumentListener;
import javax.swing.event.EventListenerList;
import javax.swing.text.Position.Bias;
import org.apache.harmony.awt.text.AWTTextAction;
import org.apache.harmony.awt.text.ActionNames;
import org.apache.harmony.awt.text.ActionSet;
import org.apache.harmony.awt.text.TextKit;
import org.apache.harmony.awt.text.TextUtils;
import org.apache.harmony.x.swing.StringConstants;
import org.apache.harmony.x.swing.internal.nls.Messages;
/**
 * Default {@link Caret} implementation: paints the insertion point, tracks
 * the selection (dot/mark), and keeps both in sync with document, focus and
 * mouse events of the installed text component. The caret's own bounds (it
 * extends {@link Rectangle}) are the last painted caret position.
 */
public class DefaultCaret extends Rectangle implements Caret, FocusListener,
    MouseListener, MouseMotionListener {

    public static final int ALWAYS_UPDATE = 2;
    public static final int NEVER_UPDATE = 1;
    public static final int UPDATE_WHEN_ON_EDT = 0;

    protected EventListenerList listenerList = new EventListenerList();
    protected transient ChangeEvent changeEvent = new ChangeEvent(this);

    private int mark;
    private int dot;
    private transient Position.Bias dotBias = Position.Bias.Forward;
    private transient Position.Bias markBias = Position.Bias.Forward;
    private boolean async;

    // Position in the document that follows the position adjacent to the
    // caret on document updates; used to recompute the dot bias afterwards.
    private int dotTrack;

    // Current update policy (set by setUpdatePolicy).
    private int selectedPolicy = UPDATE_WHEN_ON_EDT;

    private int blinkRate;

    // Installed text component.
    private Component component;

    TextKit textKit;

    // Current selection color.
    private Color selectionColor;

    // Current caret color.
    private Color caretColor;

    // Fallback used when JTextComponent.getSelectionColor() returns null.
    private static final Color DEF_SEL_COLOR = new Color(192, 224, 255);

    // Fallback used when JTextComponent.getCaretColor() returns null.
    private static final Color DEF_CARET_COLOR = new Color(0, 0, 0);

    // Delay for magicTimer.
    static final int DEFAULT_MAGIC_DELAY = 600;

    private static final int APEX_NUMBER = 3;
    private static final int TRIANGLE_HEIGHT = 4;
    private static final int RIGHT_TRIANGLE_WIDTH = 5;
    private static final int LEFT_TRIANGLE_WIDTH = 4;

    // Current painter for the selection highlight.
    private transient DefaultHighlighter.DefaultHighlightPainter painter;

    // Tag of the current selection highlight, if any.
    private transient Object selectionTag;

    // DocumentListener for the document of the current JTextComponent.
    private transient DocumentListener dh = new DocumentHandler();

    // PropertyChangeListener for the current JTextComponent.
    private transient PropertyHandler pch;

    // Used by mouseClicked (double click selects a word).
    private static final transient AWTTextAction SELECT_WORD_ACTION =
        ActionSet.actionMap.get(ActionNames.selectWordAction);

    // Used by mouseClicked (triple click selects a line).
    private static final transient AWTTextAction SELECT_LINE_ACTION =
        ActionSet.actionMap.get(ActionNames.selectLineAction);

    Point magicCaretPosition;

    private transient boolean isVisible;
    private boolean isSelectionVisible;

    // Timer that repaints the caret according to the current blinkRate.
    transient Object blinkTimer;

    // Timer that sets magicCaretPosition to the current caret position.
    transient Object magicTimer;

    // Whether the caret should currently be painted (toggles while blinking).
    transient boolean shouldDraw = true;

    // X coordinates of the direction flag painted for bidirectional text.
    private transient int[] triangleX = new int[APEX_NUMBER];

    // Y coordinates of the direction flag painted for bidirectional text.
    private transient int[] triangleY = new int[APEX_NUMBER];

    // Scratch array for modelToView/viewToModel calls.
    private transient Position.Bias[] bias = new Position.Bias[1];

    // For DnD support: the selection doesn't change while dragging.
    private boolean handleMouseDrag = true;

    // Flag to restore the selection when focus returns.
    private boolean restoreSelection;

    // Used when the JTextComponent has a NavigationFilter.
    private transient FilterBypass filterBypass = new FilterBypass(this);
    private transient NavigationFilter navigationFilter;

    private Highlighter highlighter;
    private Document document;
    private boolean isBidiDocument;

    // Coordinates where the mouse button was last pressed; the handling of
    // MouseEvents in mouseClicked depends on this.
    private int[] lastPressPoint = new int[2];

    // Action for blinkTimer.
    Object blinkAction;

    // Action for magicTimer. If magicCaretPosition is still null it is set
    // to the current caret position, otherwise nothing is done.
    Object magicAction;

    /**
     * Tracks property changes of the installed text component and keeps the
     * caret's cached document, colors, highlighter and navigation filter in
     * sync with them.
     */
    private class PropertyHandler implements PropertyChangeListener {
        public void propertyChange(final PropertyChangeEvent evt) {
            String propertyName = evt.getPropertyName();
            Object newValue = evt.getNewValue();
            if (StringConstants.TEXT_COMPONENT_DOCUMENT_PROPERTY
                .equals(propertyName)) {
                Document oldDoc = (Document)evt.getOldValue();
                document = textKit.getDocument();
                if (oldDoc != null) {
                    oldDoc.removeDocumentListener(dh);
                }
                if (document != null) {
                    updateBidiInfo();
                    document.addDocumentListener(dh);
                    // Reset the caret to the start of the new document.
                    setDot(0);
                }
            } else if (StringConstants.TEXT_COMPONENT_CARET_COLOR_PROPERTY
                       .equals(propertyName)) {
                caretColor = (Color)newValue;
            } else if (StringConstants.TEXT_COMPONENT_SELECTION_COLOR_PROPERTY
                       .equals(propertyName)) {
                selectionColor = (Color)newValue;
                painter = new DefaultHighlighter.DefaultHighlightPainter(
                       selectionColor);
            } else if (StringConstants.TEXT_COMPONENT_HIGHLIGHTER_PROPERTY
                       .equals(propertyName)) {
                highlighter = (Highlighter)newValue;
            } else if (StringConstants.TEXT_COMPONENT_NAV_FILTER_NAME
                       .equals(propertyName)) {
                navigationFilter = (NavigationFilter)newValue;
            }
        }
    }

    // DocumentListener that updates dot, dotBias, mark and the current
    // selection on document updates, honoring the update policy.
    // It does not repaint the caret: the text component repaints itself on
    // document updates.
    private class DocumentHandler implements DocumentListener {

        /*
         * Returns the position after a string removal (length - length of the
         * removed string, offset - position where it was removed). pos is the
         * position the new position should be computed for.
         */
        private int newPosOnRemove(final int pos, final int length,
                                   final int offset) {
            int endUpdate = offset + length;
            if ((pos > offset) && (endUpdate <= pos)) {
                // Removal entirely before pos: shift left by the length.
                return pos - length;
            }
            if ((pos > offset) && (endUpdate > pos)) {
                // pos was inside the removed range: clamp to its start.
                return offset;
            }
            return pos;
        }

        /*
         * Returns the position after a string insertion (length - length of
         * the inserted string, offset - position where it was inserted). pos
         * is the position the new position should be computed for.
         */
        private int newPosOnInsert(final int pos, final int length,
                                   final int offset) {
            if (offset <= pos) {
                return pos + length;
            }
            return pos;
        }

        /**
         * The caret position doesn't change on changedUpdate of the Document.
         */
        public void changedUpdate(final DocumentEvent e) {
        }

        public synchronized void removeUpdate(final DocumentEvent e) {
            final int docLength = document.getLength();
            final boolean condOnNeverUpdate =
                (docLength <= Math.max(dot, mark));
            final boolean isNeverUpdate = (selectedPolicy == NEVER_UPDATE);
            final boolean isUpdateWhenOnEdt =
                (selectedPolicy == UPDATE_WHEN_ON_EDT);
            final boolean isUpdateWhenOnEdtAndAsync =
                (isUpdateWhenOnEdt && !EventQueue.isDispatchThread());
            // Even a "don't move" policy must clamp the caret when it now
            // lies beyond the shortened document.
            if ((isNeverUpdate || isUpdateWhenOnEdtAndAsync)
                && !condOnNeverUpdate) {
                return;
            }
            removeUpdate(e, isNeverUpdate || isUpdateWhenOnEdtAndAsync,
                         docLength);
        }

        private void removeUpdate(final DocumentEvent e,
                                  final boolean trivialUpdate,
                                  final int docLength) {
            int length = e.getLength();
            int offset = e.getOffset();
            int newMark;
            int newDot;
            Position.Bias newBias;
            if (trivialUpdate) {
                // Policy forbids tracking: just clamp into the document.
                newMark = Math.min(mark, docLength);
                newDot = Math.min(dot, docLength);
                newBias = dotBias;
            } else {
                newMark = newPosOnRemove(mark, length, offset);
                dotTrack = newPosOnRemove(dotTrack, length, offset);
                newDot = newPosOnRemove(dot, length, offset);
                newBias = getNewBias(dotTrack, newDot);
            }
            mark = newMark;
            moveDot(newDot, newBias);
            if (dot == mark) {
                markBias = dotBias;
            }
        }

        public synchronized void insertUpdate(final DocumentEvent e) {
            if ((selectedPolicy == NEVER_UPDATE)
                || ((selectedPolicy == UPDATE_WHEN_ON_EDT) &&
                    !EventQueue.isDispatchThread())) {
                return;
            }
            int length = e.getLength();
            int offset = e.getOffset();
            String s = null;
            try {
                s = document.getText(offset, 1);
            } catch (final BadLocationException ex) {
                // s stays null; the null-safe comparison below handles it.
            }
            if (offset == dot && (length + offset) != document.getLength()) {
                dotBias = Position.Bias.Backward;
            }
            // Null-safe constant-first compare: s.equals("\n") used to throw
            // NullPointerException when getText failed above.
            if ("\n".equals(s)) {
                dotBias = Position.Bias.Forward;
            }
            mark = newPosOnInsert(mark, length, offset);
            moveDot(newPosOnInsert(dot, length, offset), dotBias);
            if (dot == mark) {
                markBias = dotBias;
            }
            updateBidiInfo();
        }
    }

    /**
     * Default implementation of NavigationFilter.FilterBypass. Used in
     * setDot() and moveDot() when the component has a NavigationFilter, to
     * let the filter reach the caret's unfiltered operations.
     */
    private class FilterBypass extends NavigationFilter.FilterBypass {
        DefaultCaret caret;

        FilterBypass(final DefaultCaret dc) {
            caret = dc;
        }

        @Override
        public Caret getCaret() {
            return caret;
        }

        @Override
        public void setDot(final int i, final Bias b) {
            caret.internalSetDot(i, b);
        }

        @Override
        public void moveDot(final int i, final Bias b) {
            caret.internalMoveDot(i, b);
        }
    }

    /**
     * Sets all fields to default values.
     */
    public DefaultCaret() {
        blinkTimer = createTimer(false, 0);
        magicTimer = createTimer(true, 0);
        painter = new DefaultHighlighter.DefaultHighlightPainter(
               selectionColor);
    }

    public void addChangeListener(final ChangeListener changeListener) {
        // Silently ignore null, matching the listener-list convention.
        if (changeListener != null) {
            listenerList.add(ChangeListener.class, changeListener);
        }
    }

    /**
     * Adds the selection highlight for the current dot and mark when the
     * selection is visible; otherwise remembers that it must be restored.
     */
    private void addHighlight() {
        if (mark == dot) {
            return;
        }
        if (!isSelectionVisible) {
            restoreSelection = true;
            removeHighlight();
            return;
        }
        if (selectionTag == null) {
            selectionTag = addHighlight(Math.min(dot, mark),
                                        Math.max(dot, mark));
        }
    }

    protected void adjustVisibility(final Rectangle r) {
        if (r != null) {
            // Widen by one pixel so the caret line itself is scrolled in.
            textKit.scrollRectToVisible(new Rectangle(r.x, r.y, r.width + 1, r.height));
        }
    }

    /**
     * Repaints the caret at the view position of the current dot/dotBias.
     */
    private void calcNewPos() {
        Rectangle p = null;
        try {
            p = textKit.modelToView(dot, dotBias);
        } catch (final BadLocationException e) {
            // Fall through: p stays null and nothing is repainted.
        }
        if (p == null) {
            return;
        }
        damage(p);
    }

    /**
     * Sets the dot to position i, repaints the caret, resets the magic caret
     * position and fires a state change. The mark is not touched here.
     *
     * @param i new dot
     */
    private void changeDot(final int i) {
        dot = i;
        magicCaretPosition = null;
        shouldDraw = true;
        dotTrack = changePosTrack(dot, dotBias);
        calcNewPos();
        fireStateChanged();
    }

    /**
     * Changes the current selection highlight, if any. Removes it when dot
     * equals mark.
     */
    private void changeHighlight() {
        if (dot == mark) {
            if (selectionTag != null) {
                removeHighlight();
            }
        } else {
            if (selectionTag == null) {
                addHighlight();
            } else {
                changeHighlight(selectionTag, Math.min(dot, mark),
                                Math.max(dot, mark));
            }
        }
    }

    /*
     * Calculates the new adjacent position value for a position and bias.
     *
     * @param newPos offset the adjacent position should be computed for
     * @param newBias bias of newPos
     * @return the adjacent position
     */
    private int changePosTrack(final int newPos, final Position.Bias newBias) {
        if (newBias == Position.Bias.Forward) {
            return newPos + 1;
        } else {
            return newPos;
        }
    }

    protected synchronized void damage(final Rectangle r) {
        // Repaint the old caret position first, then the new one.
        repaint();
        if (r == null) {
            return;
        }
        x = r.x;
        y = r.y;
        width = 0;
        height = r.height - 2;
        adjustVisibility(r);
        repaint();
    }

    /**
     * Detaches this caret from comp: removes all listeners, stops both
     * timers and clears the cached component state.
     */
    public void deinstall(final JTextComponent comp) {
        if (component == null || comp != component) {
            return;
        }
        if (document != null) {
            document.removeDocumentListener(dh);
        }
        component.removePropertyChangeListener(pch);
        component.removeMouseListener(this);
        component.removeMouseMotionListener(this);
        component.removeFocusListener(this);
        stopTimer(blinkTimer);
        stopTimer(magicTimer);
        highlighter = null;
        component = null;
        textKit = null;
    }

    @Override
    public boolean equals(final Object obj) {
        // Identity equality: unlike Rectangle, two carets with the same
        // bounds are not interchangeable.
        // NOTE(review): hashCode() is inherited from Rectangle, so equal
        // hash codes do not imply equals() here — confirm this matches the
        // reference implementation's intent.
        return this == obj;
    }

    protected void fireStateChanged() {
        if (isVisible) {
            TextUtils.setNativeCaretPosition(this, component);
        }
        Object[] listeners = listenerList.getListenerList();
        // Listener list layout is (class, listener) pairs; walk backwards.
        for (int i = listeners.length - 2; i >= 0; i -= 2) {
            if (listeners[i] == ChangeListener.class) {
                if (changeEvent == null) {
                    changeEvent = new ChangeEvent(this);
                }
                ((ChangeListener)listeners[i + 1]).stateChanged(changeEvent);
            }
        }
    }

    /**
     * Makes the selection visible (restoring a pending highlight) and shows
     * the caret when the component is editable and enabled.
     */
    public void focusGained(final FocusEvent fe) {
        isSelectionVisible = true;
        if (restoreSelection) {
            addHighlight();
            restoreSelection = false;
        }
        if (isComponentEditable()) {
            setVisible(true);
            repaint();
        } else {
            setVisible(false);
        }
    }

    /**
     * Hides the caret. When focus moves (non-temporarily) to a component in
     * the same window, the selection highlight is removed and flagged for
     * restoration on the next focusGained.
     */
    public void focusLost(final FocusEvent fe) {
        setVisible(false);
        isSelectionVisible = true;
        Component c = fe.getOppositeComponent();
        if (c != null
            && !fe.isTemporary()
            && isRestoreSelectionCondition(c)) {
            restoreSelection = true;
            removeHighlight();
        }
        repaint();
    }

    private boolean getAsynchronousMovement() {
        return async;
    }

    public int getBlinkRate() {
        return blinkRate;
    }

    public ChangeListener[] getChangeListeners() {
        return listenerList.getListeners(ChangeListener.class);
    }

    protected final JTextComponent getComponent() {
        return component instanceof JTextComponent
            ? (JTextComponent)component : null;
    }

    /*
     * Defines the flag direction used to paint the caret for bidirectional
     * text.
     *
     * @return true for a left-pointing flag, false for a right-pointing one
     */
    private boolean getDirection() {
        AbstractDocument ad = ((AbstractDocument)document);
        int length = ad.getLength();
        boolean currentDirection = dot >= 1 && ad.isLeftToRight(dot - 1);
        boolean nextDirection = dot > length || ad.isLeftToRight(dot);
        if (currentDirection == nextDirection) {
            return !currentDirection;
        }
        // On a direction boundary the bias decides which run the caret
        // belongs to.
        if (currentDirection) {
            return dotBias != Position.Bias.Backward;
        }
        return dotBias == Position.Bias.Backward;
    }

    public int getDot() {
        return dot;
    }

    /**
     * Returns the current dot bias.
     *
     * @return dot bias
     */
    Position.Bias getDotBias() {
        return dotBias;
    }

    public <T extends EventListener> T[] getListeners(final Class<T> c) {
        // EventListenerList.getListeners already throws ClassCastException
        // for an inappropriate class; the former catch-and-rethrow was a
        // no-op and has been removed.
        return listenerList.getListeners(c);
    }

    public Point getMagicCaretPosition() {
        return magicCaretPosition;
    }

    public int getMark() {
        return mark;
    }

    /**
     * Gets the new bias for a position and its adjacent position.
     *
     * @param posTrack adjacent position
     * @param pos current position
     * @return bias
     */
    private Position.Bias getNewBias(final int posTrack, final int pos) {
        return (posTrack == pos) ? Position.Bias.Backward
            : Position.Bias.Forward;
    }

    protected Highlighter.HighlightPainter getSelectionPainter() {
        return painter;
    }

    public int getUpdatePolicy() {
        return selectedPolicy;
    }

    /**
     * Attaches this caret to c: registers mouse, focus and property
     * listeners, caches the highlighter and navigation filter and creates a
     * selection painter for the component's selection color.
     */
    public void install(final JTextComponent c) {
        setComponent(c);
        component.addMouseListener(this);
        component.addMouseMotionListener(this);
        component.addFocusListener(this);
        component.addPropertyChangeListener(getPropertyHandler());
        highlighter = c.getHighlighter();
        navigationFilter = c.getNavigationFilter();
        painter = new DefaultHighlighter.DefaultHighlightPainter(
               selectionColor);
    }

    void setComponent(final Component c) {
        component = c;
        textKit = TextUtils.getTextKit(component);
        document = textKit.getDocument();
        updateBidiInfo();
        if (document != null) {
            document.addDocumentListener(dh);
        }
        selectionColor = getSelectionColor();
        caretColor = getCaretColor();
    }

    public boolean isActive() {
        return isVisible;
    }

    public boolean isSelectionVisible() {
        return isSelectionVisible;
    }

    public boolean isVisible() {
        return isVisible;
    }

    // mouseClicked is only delivered when the mouse was released within
    // 5 pixels of the press. That threshold is too coarse when a glyph is
    // narrower than 5 pixels, so clicks that are really small drags over an
    // existing selection are filtered out here.
    private boolean needClick(final MouseEvent e) {
        return selectionTag == null
            || (Math.abs(e.getX() - lastPressPoint[0]) > 1 && Math
                .abs(e.getY() - lastPressPoint[1]) > 1);
    }

    public void mouseClicked(final MouseEvent me) {
        if (!needClick(me)) {
            return;
        }
        int clickCount = me.getClickCount();
        if (me.getButton() == MouseEvent.BUTTON1) {
            if (clickCount == 1) {
                positionCaret(me);
            }
            if (clickCount == 2) {
                SELECT_WORD_ACTION.performAction(textKit);
            }
            if (clickCount == 3) {
                SELECT_LINE_ACTION.performAction(textKit);
            }
        }
    }

    /**
     * Calculates the model offset for the MouseEvent and, unless the drag is
     * a DnD gesture, moves the dot there (extending the selection).
     */
    public void mouseDragged(final MouseEvent me) {
        int offset = textKit.viewToModel(new Point(me.getX(), me
                                                   .getY()), bias);
        if (offset < 0) {
            return;
        }
        int mask = MouseEvent.BUTTON1_DOWN_MASK;
        if ((me.getModifiersEx() & mask) != mask) {
            return;
        }
        if (handleMouseDrag) {
            moveDot(offset, bias[0]);
        }
    }

    /**
     * Returns true when the component has drag-and-drop enabled and offset
     * lies within the current selection; false otherwise.
     */
    private boolean isDragAndDropCondition(final int offset) {
        return isDragEnabled()
            && offset >= Math.min(dot, mark)
            && offset <= Math.max(dot, mark);
    }

    public void mouseEntered(final MouseEvent me) {
        // No-op: entering the component does not affect the caret.
    }

    public void mouseExited(final MouseEvent me) {
        // No-op: leaving the component does not affect the caret.
    }

    public void mouseMoved(final MouseEvent me) {
        // No-op: plain mouse motion does not affect the caret.
    }

    /**
     * Calculates the model offset for the MouseEvent. With shift held the
     * dot is moved (extending the selection); otherwise the dot is set,
     * unless the press starts a drag-and-drop gesture over the selection.
     */
    public void mousePressed(final MouseEvent me) {
        int offset;
        int mask = MouseEvent.SHIFT_DOWN_MASK;
        if (me.getButton() != MouseEvent.BUTTON1 || !component.isEnabled()) {
            return;
        }
        component.requestFocusInWindow();
        offset = textKit.viewToModel(new Point(me.getX(), me.getY()),
                                     bias);
        if (offset < 0) {
            return;
        }
        rememberPressPoint(me);
        if ((me.getModifiersEx() & mask) == mask) {
            moveDot(offset, bias[0]);
        } else {
            boolean condition = isDragAndDropCondition(offset);
            if (!condition) {
                setDot(offset, bias[0]);
            }
            handleMouseDrag = !(condition && dot != mark);
        }
    }

    private void rememberPressPoint(final MouseEvent e) {
        lastPressPoint[0] = e.getX();
        lastPressPoint[1] = e.getY();
    }

    public void mouseReleased(final MouseEvent me) {
        // No-op: all handling happens on press, drag and click.
    }

    protected void moveCaret(final MouseEvent me) {
        int offset = textKit.viewToModel(new Point(me.getX(), me
                                                   .getY()), bias);
        if (offset >= 0) {
            moveDot(offset);
        }
    }

    /**
     * Moves the dot to i (forward bias) without changing the mark; the
     * selection highlight is added or adjusted accordingly.
     */
    public void moveDot(final int i) {
        moveDot(i, Position.Bias.Forward);
    }

    /**
     * If the component has a NavigationFilter the move is routed through it;
     * otherwise the dot and dot bias are changed directly. The mark is not
     * changed, and the selection highlight is added or adjusted.
     *
     * @param i new dot
     * @param b new dot bias
     */
    void moveDot(final int i, final Position.Bias b) {
        if (navigationFilter == null) {
            internalMoveDot(i, b);
        } else {
            navigationFilter.moveDot(filterBypass, i, b);
        }
    }

    private void internalMoveDot(final int i, final Position.Bias b) {
        dotBias = b;
        changeDot(i);
        changeHighlight();
    }

    public void paint(final Graphics g) {
        try {
            if (!isVisible || !shouldDraw) {
                return;
            }
            Rectangle p = textKit.modelToView(dot, dotBias);
            if (p == null) {
                return;
            }
            this.setBounds(p.x, p.y, 0, p.height - 2);
            g.setColor(caretColor);
            g.drawRect(x, y, width, height);
            if (isBidiDocument) {
                // Paint the small direction flag at the caret top.
                triangleX[0] = x;
                triangleX[1] = x;
                triangleX[2] = getDirection() ? (x - LEFT_TRIANGLE_WIDTH)
                    : (x + RIGHT_TRIANGLE_WIDTH);
                triangleY[0] = y;
                triangleY[1] = y + TRIANGLE_HEIGHT;
                triangleY[2] = y;
                g.fillPolygon(triangleX, triangleY, APEX_NUMBER);
            }
        } catch (BadLocationException e) {
            // The dot is momentarily invalid; skip painting this cycle.
        }
    }

    protected void positionCaret(final MouseEvent me) {
        int offset = textKit.viewToModel(new Point(me.getX(), me
                                                   .getY()), bias);
        if (offset >= 0) {
            setDot(offset, bias[0]);
        }
    }

    /**
     * Reads the object by default, then reads the string representations of
     * dotBias and markBias and restores all transient state: painter,
     * document/property handlers, timers, triangle buffers and bias array.
     *
     * @param s
     * @throws IOException
     * @throws ClassNotFoundException
     */
    private void readObject(final ObjectInputStream s) throws IOException,
            ClassNotFoundException {
        s.defaultReadObject();
        String forwardString = Position.Bias.Forward.toString();
        dotBias = (forwardString.equals(s.readUTF())) ? Position.Bias.Forward
            : Position.Bias.Backward;
        markBias = (forwardString.equals(s.readUTF())) ? Position.Bias.Forward
            : Position.Bias.Backward;
        painter = new DefaultHighlighter.DefaultHighlightPainter(
               selectionColor);
        dh = new DocumentHandler();
        pch = new PropertyHandler();
        if (component != null) {
            component.addPropertyChangeListener(pch);
            if (textKit.getDocument() != null) {
                textKit.getDocument().addDocumentListener(dh);
            }
        }
        blinkTimer = createTimer(false, 0);
        magicTimer = createTimer(true, 0);
        triangleX = new int[APEX_NUMBER];
        triangleY = new int[APEX_NUMBER];
        bias = new Position.Bias[1];
    }

    public void removeChangeListener(final ChangeListener chL) {
        listenerList.remove(ChangeListener.class, chL);
    }

    /**
     * Removes the current selection highlight, if any, and clears
     * selectionTag.
     */
    private void removeHighlight() {
        if (selectionTag == null) {
            return;
        }
        removeHighlight(selectionTag);
        selectionTag = null;
    }

    protected final synchronized void repaint() {
        if (component != null) {
            if (!isBidiDocument) {
                // The region is widened because the current 2D implementation
                // cannot properly repaint a 1-pixel-wide region; the exact
                // bounds would be (x, y, width + 1, height + 1).
                component.repaint(0, x-2, y, width + 4, height + 1);
            } else {
                // Extra margin covers the bidirectional direction flag.
                component.repaint(0, x - 5, y, width + 10, height + 1);
            }
        }
    }

    final void setAsynchronousMovement(final boolean b) {
        async = b;
        int i = (b) ? DefaultCaret.ALWAYS_UPDATE
            : DefaultCaret.UPDATE_WHEN_ON_EDT;
        setUpdatePolicy(i);
    }

    /**
     * Sets the blink rate and restarts the blink timer when blinking is on.
     *
     * @throws IllegalArgumentException if i is negative
     */
    public void setBlinkRate(final int i) {
        if (i < 0) {
            throw new IllegalArgumentException(Messages.getString("swing.64", i)); //$NON-NLS-1$
        }
        blinkRate = i;
        stopTimer(blinkTimer);
        if (blinkRate > 0) {
            setTimerDelay(blinkTimer, blinkRate);
            if (isVisible) {
                startTimer(blinkTimer);
            }
        }
    }

    public void setDot(final int i) {
        setDot(i, Position.Bias.Forward);
    }

    /**
     * If the component has a NavigationFilter the call is routed through it;
     * otherwise dot and mark are set to i and both biases to b, and any
     * highlight is removed.
     *
     * @param i new dot
     * @param b new dotBias
     */
    void setDot(final int i, final Position.Bias b) {
        if (navigationFilter == null) {
            internalSetDot(i, b);
        } else {
            navigationFilter.setDot(filterBypass, i, b);
        }
    }

    private void internalSetDot(final int i, final Position.Bias b) {
        dotBias = b;
        markBias = b;
        mark = i;
        changeDot(i);
        removeHighlight();
    }

    public void setMagicCaretPosition(final Point p) {
        magicCaretPosition = p;
    }

    public void setSelectionVisible(final boolean b) {
        isSelectionVisible = b;
        if (b) {
            addHighlight();
            restoreSelection = false;
        } else {
            restoreSelection = true;
            removeHighlight();
        }
    }

    /**
     * Sets the update policy; must be one of UPDATE_WHEN_ON_EDT,
     * NEVER_UPDATE or ALWAYS_UPDATE.
     *
     * @throws IllegalArgumentException for any other value
     */
    public void setUpdatePolicy(final int policy) {
        if (policy < UPDATE_WHEN_ON_EDT || policy > ALWAYS_UPDATE) {
            throw new IllegalArgumentException("invalid update policy: "
                                               + policy);
        }
        selectedPolicy = policy;
    }

    public void setVisible(final boolean b) {
        isVisible = b;
        if (b) {
            startTimer(magicTimer);
            if (blinkRate > 0) {
                startTimer(blinkTimer);
            }
        } else {
            stopTimer(blinkTimer);
            stopTimer(magicTimer);
        }
    }

    /*
     * The format of the string is based on 1.5 release behavior
     * of BasicTextUI.BasicCaret class which can be revealed
     * using the following code:
     *
     *     JTextArea textArea = new JTextArea();
     *     System.out.println(textArea.getCaret());
     *     System.out.println(textArea.getCaret().getClass().getName());
     */
    @Override
    public String toString() {
        return "Dot=(" + dot + ", " + dotBias.toString() + ") " + "Mark=("
            + mark + ", " + markBias.toString() + ")";
    }

    private void updateBidiInfo() {
        isBidiDocument = TextUtils.isBidirectional(document);
    }

    /**
     * Writes the object by default, then writes the string representations
     * of dotBias and markBias.
     *
     * @param s
     * @throws IOException
     */
    private void writeObject(final ObjectOutputStream s) throws IOException {
        s.defaultWriteObject();
        s.writeUTF(dotBias.toString());
        s.writeUTF(markBias.toString());
    }

    private PropertyChangeListener getPropertyHandler() {
        if (pch == null) {
            pch = new PropertyHandler();
        }
        return pch;
    }

    Object createTimer(final boolean isMagicTimer, final int delay) {
        return isMagicTimer ? new Timer(DEFAULT_MAGIC_DELAY,
                                        (ActionListener)getMagicAction())
            : new Timer(delay, (ActionListener)getBlinkAction());
    }

    void startTimer(final Object timer) {
        ((Timer)timer).start();
    }

    void setTimerDelay(final Object timer, final int delay) {
        ((Timer)timer).setDelay(delay);
    }

    void stopTimer(final Object timer) {
        ((Timer)timer).stop();
    }

    Object getMagicAction() {
        if (magicAction == null) {
            magicAction = new AbstractAction() {
                public void actionPerformed(final ActionEvent e) {
                    if (magicCaretPosition == null) {
                        magicCaretPosition = new Point(x, y);
                    }
                }
            };
        }
        return magicAction;
    }

    Object getBlinkAction() {
        if (blinkAction == null) {
            blinkAction = new AbstractAction() {
                public void actionPerformed(final ActionEvent e) {
                    shouldDraw = !shouldDraw;
                    // Repaint on the EDT regardless of the timer's thread.
                    EventQueue.invokeLater(new Runnable() {
                        public void run() {
                            repaint();
                        }
                    });
                }
            };
        }
        return blinkAction;
    }

    boolean isRestoreSelectionCondition(final Component c) {
        // True when the opposite component lives in the same window.
        return SwingUtilities.windowForComponent(c) == SwingUtilities
            .windowForComponent(component);
    }

    Color getCaretColor() {
        JTextComponent textComponent = getComponent();
        Color componentsColor = textComponent.getCaretColor();
        return componentsColor != null ? componentsColor : DEF_CARET_COLOR;
    }

    Color getSelectionColor() {
        JTextComponent textComponent = getComponent();
        Color componentsColor = textComponent.getSelectionColor();
        return componentsColor != null ? componentsColor : DEF_SEL_COLOR;
    }

    boolean isComponentEditable() {
        return ((JTextComponent)component).isEditable() && component.isEnabled();
    }

    boolean isDragEnabled() {
        return ((JTextComponent)component).getDragEnabled();
    }

    Object addHighlight(final int p0, final int p1) {
        if (highlighter != null && component.isEnabled()) {
            Object result = null;
            try {
                result = highlighter.addHighlight(p0, p1, painter);
            } catch (BadLocationException e) {
                // Invalid range: result stays null.
            }
            return result;
        } else {
            return null;
        }
    }

    void changeHighlight(final Object tag, final int p0, final int p1) {
        if (highlighter != null) {
            try {
                highlighter.changeHighlight(tag, p0, p1);
            } catch (final BadLocationException e) {
                // Invalid range: leave the highlight unchanged.
            }
        }
    }

    void removeHighlight(final Object tag) {
        if (highlighter != null) {
            highlighter.removeHighlight(tag);
        }
    }

    void setMagicCaretPosition(final int pos,
                               final int direction,
                               final Point oldPoint) {
        try {
            Point newPoint = null;
            if (direction == SwingConstants.SOUTH
                || direction == SwingConstants.NORTH) {
                if (oldPoint == null) {
                    Rectangle r =
                        textKit.modelToView(pos,
                                            Position.Bias.Forward).getBounds();
                    newPoint = new Point(r.x, r.y);
                } else {
                    newPoint = oldPoint;
                }
            }
            setMagicCaretPosition(newPoint);
        } catch (BadLocationException e) {
            // NOTE(review): printed rather than swallowed like the other
            // BadLocationException handlers in this class — confirm intent.
            e.printStackTrace();
        }
    }
}
|
apache/nifi-minifi | 37,883 | minifi-nar-bundles/minifi-framework-bundle/minifi-framework/minifi-framework-core/src/test/java/org/apache/nifi/minifi/status/StatusConfigReporterTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.minifi.status;
import org.apache.nifi.components.ValidationResult;
import org.apache.nifi.controller.ComponentNode;
import org.apache.nifi.controller.FlowController;
import org.apache.nifi.controller.ProcessorNode;
import org.apache.nifi.controller.ReportingTaskNode;
import org.apache.nifi.controller.ScheduledState;
import org.apache.nifi.controller.service.ControllerServiceNode;
import org.apache.nifi.controller.service.ControllerServiceState;
import org.apache.nifi.controller.status.ConnectionStatus;
import org.apache.nifi.controller.status.ProcessGroupStatus;
import org.apache.nifi.controller.status.ProcessorStatus;
import org.apache.nifi.controller.status.RemoteProcessGroupStatus;
import org.apache.nifi.controller.status.RunStatus;
import org.apache.nifi.controller.status.TransmissionStatus;
import org.apache.nifi.diagnostics.GarbageCollection;
import org.apache.nifi.diagnostics.StorageUsage;
import org.apache.nifi.diagnostics.SystemDiagnostics;
import org.apache.nifi.groups.ProcessGroup;
import org.apache.nifi.groups.RemoteProcessGroup;
import org.apache.nifi.minifi.commons.status.FlowStatusReport;
import org.apache.nifi.remote.RemoteGroupPort;
import org.apache.nifi.reporting.Bulletin;
import org.apache.nifi.reporting.BulletinQuery;
import org.apache.nifi.reporting.BulletinRepository;
import org.junit.Before;
import org.junit.Test;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import static org.apache.nifi.minifi.commons.status.util.StatusReportPopulator.addConnectionStatus;
import static org.apache.nifi.minifi.commons.status.util.StatusReportPopulator.addControllerServiceStatus;
import static org.apache.nifi.minifi.commons.status.util.StatusReportPopulator.addExpectedRemoteProcessGroupStatus;
import static org.apache.nifi.minifi.commons.status.util.StatusReportPopulator.addInstanceStatus;
import static org.apache.nifi.minifi.commons.status.util.StatusReportPopulator.addProcessorStatus;
import static org.apache.nifi.minifi.commons.status.util.StatusReportPopulator.addReportingTaskStatus;
import static org.apache.nifi.minifi.commons.status.util.StatusReportPopulator.addSystemDiagnosticStatus;
import static org.junit.Assert.assertEquals;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyObject;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
/**
 * Verifies that {@link StatusConfigReporter} translates FlowStatus query strings
 * (for example {@code "processor:all:health"}) into the expected
 * {@link FlowStatusReport}, using a mocked {@link FlowController} populated with
 * known component statuses, bulletins, and validation errors.
 */
public class StatusConfigReporterTest {

    private FlowController mockFlowController;
    private ProcessGroupStatus rootGroupStatus;
    private BulletinRepository bulletinRepo;
    private ProcessGroup processGroup;

    @Before
    public void setup() {
        mockFlowController = mock(FlowController.class);
        rootGroupStatus = mock(ProcessGroupStatus.class);
        bulletinRepo = mock(BulletinRepository.class);
        processGroup = mock(ProcessGroup.class);

        // Every status lookup resolves against the mocked root group.
        when(mockFlowController.getRootGroupId()).thenReturn("root");
        when(mockFlowController.getGroupStatus("root")).thenReturn(rootGroupStatus);
        when(mockFlowController.getControllerStatus()).thenReturn(rootGroupStatus);
        when(mockFlowController.getBulletinRepository()).thenReturn(bulletinRepo);
        when(mockFlowController.getGroup(mockFlowController.getRootGroupId())).thenReturn(processGroup);
    }

    @Test
    public void processorStatusHealth() throws Exception {
        populateProcessor(false, false);

        FlowStatusReport actual = requestStatus("processor:all:health");

        FlowStatusReport expected = newEmptyReport();
        addProcessorStatus(expected, true, false, false, false, false);

        assertEquals(expected, actual);
    }

    @Test
    public void individualProcessorStatusHealth() throws Exception {
        populateProcessor(false, false);

        FlowStatusReport actual = requestStatus("processor:UpdateAttributeProcessorId:health");

        FlowStatusReport expected = newEmptyReport();
        addProcessorStatus(expected, true, false, false, false, false);

        assertEquals(expected, actual);
    }

    @Test
    public void processorStatusWithValidationErrors() throws Exception {
        populateProcessor(true, false);

        FlowStatusReport actual = requestStatus("processor:all:health");

        FlowStatusReport expected = newEmptyReport();
        addProcessorStatus(expected, true, true, false, false, false);

        assertEquals(expected, actual);
    }

    @Test
    public void processorStatusAll() throws Exception {
        populateProcessor(true, true);

        FlowStatusReport actual = requestStatus("processor:all:health, stats, bulletins");

        FlowStatusReport expected = newEmptyReport();
        addProcessorStatus(expected, true, true, true, true, true);

        assertEquals(expected, actual);
    }

    @Test
    public void connectionStatusHealth() throws Exception {
        populateConnection();

        FlowStatusReport actual = requestStatus("connection:all:health");

        FlowStatusReport expected = newEmptyReport();
        addConnectionStatus(expected, true, false);

        assertEquals(expected, actual);
    }

    @Test
    public void connectionStatusAll() throws Exception {
        populateConnection();

        FlowStatusReport actual = requestStatus("connection:all:health, stats");

        FlowStatusReport expected = newEmptyReport();
        addConnectionStatus(expected, true, true);

        assertEquals(expected, actual);
    }

    @Test
    public void connectionAndProcessorStatusHealth() throws Exception {
        populateConnection();
        populateProcessor(false, false);

        FlowStatusReport actual = requestStatus("connection:connectionId:health; processor:UpdateAttributeProcessorId:health");

        FlowStatusReport expected = newEmptyReport();
        addConnectionStatus(expected, true, false);
        addProcessorStatus(expected, true, false, false, false, false);

        assertEquals(expected, actual);
    }

    @Test
    public void provenanceReportingTaskStatusHealth() throws Exception {
        populateReportingTask(false, false);

        FlowStatusReport actual = requestStatus("provenanceReporting:health");

        FlowStatusReport expected = newEmptyReport();
        addReportingTaskStatus(expected, true, false, false, false);

        assertEquals(expected, actual);
    }

    @Test
    public void provenanceReportingTaskStatusBulletins() throws Exception {
        populateReportingTask(true, false);

        FlowStatusReport actual = requestStatus("provenanceReporting:bulletins");

        FlowStatusReport expected = newEmptyReport();
        addReportingTaskStatus(expected, false, false, true, true);

        assertEquals(expected, actual);
    }

    @Test
    public void provenanceReportingTaskStatusAll() throws Exception {
        populateReportingTask(true, true);

        FlowStatusReport actual = requestStatus("provenanceReporting:health,bulletins");

        FlowStatusReport expected = newEmptyReport();
        addReportingTaskStatus(expected, true, true, true, true);

        assertEquals(expected, actual);
    }

    @Test
    public void systemDiagnosticHeap() throws Exception {
        populateSystemDiagnostics();

        FlowStatusReport actual = requestStatus("systemDiagnostics:heap");

        FlowStatusReport expected = newEmptyReport();
        addSystemDiagnosticStatus(expected, true, false, false, false, false);

        assertEquals(expected, actual);
    }

    @Test
    public void systemDiagnosticProcessorStats() throws Exception {
        populateSystemDiagnostics();

        FlowStatusReport actual = requestStatus("systemDiagnostics:processorStats");

        FlowStatusReport expected = newEmptyReport();
        addSystemDiagnosticStatus(expected, false, true, false, false, false);

        assertEquals(expected, actual);
    }

    @Test
    public void systemDiagnosticFlowFileRepo() throws Exception {
        populateSystemDiagnostics();

        FlowStatusReport actual = requestStatus("systemDiagnostics:flowfilerepositoryusage");

        FlowStatusReport expected = newEmptyReport();
        addSystemDiagnosticStatus(expected, false, false, true, false, false);

        assertEquals(expected, actual);
    }

    @Test
    public void systemDiagnosticContentRepo() throws Exception {
        populateSystemDiagnostics();

        FlowStatusReport actual = requestStatus("systemDiagnostics:contentrepositoryusage");

        FlowStatusReport expected = newEmptyReport();
        addSystemDiagnosticStatus(expected, false, false, false, true, false);

        assertEquals(expected, actual);
    }

    @Test
    public void systemDiagnosticGarbageCollection() throws Exception {
        populateSystemDiagnostics();

        FlowStatusReport actual = requestStatus("systemDiagnostics:garbagecollection");

        FlowStatusReport expected = newEmptyReport();
        addSystemDiagnosticStatus(expected, false, false, false, false, true);

        assertEquals(expected, actual);
    }

    @Test
    public void systemDiagnosticAll() throws Exception {
        populateSystemDiagnostics();

        FlowStatusReport actual = requestStatus("systemDiagnostics:garbagecollection, heap, processorstats, contentrepositoryusage, flowfilerepositoryusage");

        FlowStatusReport expected = newEmptyReport();
        addSystemDiagnosticStatus(expected, true, true, true, true, true);

        assertEquals(expected, actual);
    }

    @Test
    public void instanceStatusHealth() throws Exception {
        populateInstance(false);

        FlowStatusReport actual = requestStatus("instance:health");

        FlowStatusReport expected = newEmptyReport();
        addInstanceStatus(expected, true, false, false, false);

        assertEquals(expected, actual);
    }

    @Test
    public void instanceStatusBulletins() throws Exception {
        populateInstance(true);

        FlowStatusReport actual = requestStatus("instance:bulletins");

        FlowStatusReport expected = newEmptyReport();
        addInstanceStatus(expected, false, false, true, true);

        assertEquals(expected, actual);
    }

    @Test
    public void instanceStatusStats() throws Exception {
        populateInstance(false);

        FlowStatusReport actual = requestStatus("instance:stats");

        FlowStatusReport expected = newEmptyReport();
        addInstanceStatus(expected, false, true, false, false);

        assertEquals(expected, actual);
    }

    @Test
    public void instanceStatusAll() throws Exception {
        populateInstance(true);

        FlowStatusReport actual = requestStatus("instance:stats, bulletins, health");

        FlowStatusReport expected = newEmptyReport();
        addInstanceStatus(expected, true, true, true, true);

        assertEquals(expected, actual);
    }

    @Test
    public void controllerServiceStatusHealth() throws Exception {
        populateControllerService(false, false);

        FlowStatusReport actual = requestStatus("controllerServices:health");

        FlowStatusReport expected = newEmptyReport();
        addControllerServiceStatus(expected, true, false, false, false);

        assertEquals(expected, actual);
    }

    @Test
    public void controllerServiceStatusBulletins() throws Exception {
        populateControllerService(false, true);

        FlowStatusReport actual = requestStatus("controllerServices:bulletins");

        FlowStatusReport expected = newEmptyReport();
        addControllerServiceStatus(expected, false, false, true, true);

        assertEquals(expected, actual);
    }

    @Test
    public void controllerServiceStatusAll() throws Exception {
        populateControllerService(true, true);

        FlowStatusReport actual = requestStatus("controllerServices:bulletins, health");

        FlowStatusReport expected = newEmptyReport();
        addControllerServiceStatus(expected, true, true, true, true);

        assertEquals(expected, actual);
    }

    @Test
    public void remoteProcessGroupStatusHealth() throws Exception {
        populateRemoteProcessGroup(false, false);

        FlowStatusReport actual = requestStatus("remoteProcessGroup:all:health");

        FlowStatusReport expected = newEmptyReport();
        addExpectedRemoteProcessGroupStatus(expected, true, false, false, false, false, false);

        assertEquals(expected, actual);
    }

    @Test
    public void remoteProcessGroupStatusBulletins() throws Exception {
        populateRemoteProcessGroup(true, false);

        FlowStatusReport actual = requestStatus("remoteProcessGroup:all:bulletins");

        FlowStatusReport expected = newEmptyReport();
        addExpectedRemoteProcessGroupStatus(expected, false, false, false, false, true, true);

        assertEquals(expected, actual);
    }

    @Test
    public void remoteProcessGroupStatusInputPorts() throws Exception {
        populateRemoteProcessGroup(false, false);

        FlowStatusReport actual = requestStatus("remoteProcessGroup:all:inputPorts");

        FlowStatusReport expected = newEmptyReport();
        addExpectedRemoteProcessGroupStatus(expected, false, true, false, false, false, false);

        assertEquals(expected, actual);
    }

    @Test
    public void remoteProcessGroupStatusOutputPorts() throws Exception {
        populateRemoteProcessGroup(false, false);

        FlowStatusReport actual = requestStatus("remoteProcessGroup:all:outputPorts");

        FlowStatusReport expected = newEmptyReport();
        addExpectedRemoteProcessGroupStatus(expected, false, false, true, false, false, false);

        assertEquals(expected, actual);
    }

    @Test
    public void remoteProcessGroupStatusStats() throws Exception {
        populateRemoteProcessGroup(false, false);

        FlowStatusReport actual = requestStatus("remoteProcessGroup:all:stats");

        FlowStatusReport expected = newEmptyReport();
        addExpectedRemoteProcessGroupStatus(expected, false, false, false, true, false, false);

        assertEquals(expected, actual);
    }

    @Test
    public void remoteProcessGroupStatusAll() throws Exception {
        populateRemoteProcessGroup(true, true);

        FlowStatusReport actual = requestStatus("remoteProcessGroup:all:health, bulletins, inputPorts, outputPorts, stats");

        FlowStatusReport expected = newEmptyReport();
        addExpectedRemoteProcessGroupStatus(expected, true, true, true, true, true, true);

        assertEquals(expected, actual);
    }

    @Test
    public void statusEverything() throws Exception {
        // Default to no bulletins; the populators below override this for specific source ids.
        when(bulletinRepo.findBulletins(any())).thenReturn(Collections.emptyList());
        populateControllerService(true, false);
        populateInstance(true);
        populateSystemDiagnostics();
        populateReportingTask(false, true);
        populateConnection();
        populateProcessor(true, false);
        populateRemoteProcessGroup(false, true);

        FlowStatusReport actual = requestStatus(
                "controllerServices:bulletins,health; processor:all:health,stats,bulletins; instance:bulletins,health,stats ; systemDiagnostics:garbagecollection, heap, " +
                "processorstats, contentrepositoryusage, flowfilerepositoryusage; connection:all:health,stats; provenanceReporting:health,bulletins; remoteProcessGroup:all:health, " +
                "bulletins, inputPorts, outputPorts, stats");

        FlowStatusReport expected = newEmptyReport();
        addControllerServiceStatus(expected, true, true, true, false);
        addInstanceStatus(expected, true, true, true, true);
        addSystemDiagnosticStatus(expected, true, true, true, true, true);
        addReportingTaskStatus(expected, true, true, true, false);
        addConnectionStatus(expected, true, true);
        addProcessorStatus(expected, true, true, true, true, false);
        addExpectedRemoteProcessGroupStatus(expected, true, true, true, true, true, false);

        assertEquals(expected, actual);
    }

    /***************************
     * Helper methods
     *************************/

    /** Runs a FlowStatus query against the mocked controller. */
    private FlowStatusReport requestStatus(String statusRequest) throws Exception {
        return StatusConfigReporter.getStatus(mockFlowController, statusRequest, LoggerFactory.getLogger(StatusConfigReporterTest.class));
    }

    /** Creates an expected report with no report-generation errors. */
    private FlowStatusReport newEmptyReport() {
        FlowStatusReport report = new FlowStatusReport();
        report.setErrorsGeneratingReport(Collections.emptyList());
        return report;
    }

    /** Creates a mock bulletin carrying a fixed timestamp and the given message. */
    private Bulletin mockBulletin(String message) {
        Bulletin bulletin = mock(Bulletin.class);
        when(bulletin.getTimestamp()).thenReturn(new Date(1464019245000L));
        when(bulletin.getMessage()).thenReturn(message);
        return bulletin;
    }

    /***************************
     * Populator methods
     *************************/

    /** Stubs a single controller-level bulletin on the repository. */
    private void addBulletinsToInstance() {
        List<Bulletin> bulletinList = new ArrayList<>();
        bulletinList.add(mockBulletin("Bulletin message"));
        when(bulletinRepo.findBulletinsForController()).thenReturn(bulletinList);
    }

    /** Populates every category of system diagnostics on the mocked controller. */
    private void populateSystemDiagnostics() {
        SystemDiagnostics systemDiagnostics = new SystemDiagnostics();
        addGarbageCollectionToSystemDiagnostics(systemDiagnostics);
        addHeapSystemDiagnostics(systemDiagnostics);
        addContentRepoToSystemDiagnostics(systemDiagnostics);
        addFlowFileRepoToSystemDiagnostics(systemDiagnostics);
        addProcessorInfoToSystemDiagnostics(systemDiagnostics);
        when(mockFlowController.getSystemDiagnostics()).thenReturn(systemDiagnostics);
    }

    /** Registers one mocked controller service, optionally with validation errors and bulletins. */
    private void populateControllerService(boolean validationErrors, boolean addBulletins) {
        ControllerServiceNode controllerServiceNode = mock(ControllerServiceNode.class);
        addControllerServiceHealth(controllerServiceNode);
        if (validationErrors) {
            addValidationErrors(controllerServiceNode);
        }
        if (addBulletins) {
            addBulletins("Bulletin message", controllerServiceNode.getIdentifier());
        }

        HashSet<ControllerServiceNode> controllerServiceNodes = new HashSet<>();
        controllerServiceNodes.add(controllerServiceNode);
        when(mockFlowController.getAllControllerServices()).thenReturn(controllerServiceNodes);
    }

    /** Populates the instance-level (root group) statistics, optionally with bulletins. */
    private void populateInstance(boolean addBulletins) {
        setRootGroupStatusVariables();
        if (addBulletins) {
            addBulletinsToInstance();
        }
    }

    /** Registers one mocked provenance reporting task, optionally with bulletins and validation errors. */
    private void populateReportingTask(boolean addBulletins, boolean validationErrors) {
        if (addBulletins) {
            addBulletins("Bulletin message", "ReportProvenance");
        }

        ReportingTaskNode reportingTaskNode = mock(ReportingTaskNode.class);
        addReportingTaskNodeVariables(reportingTaskNode);

        HashSet<ReportingTaskNode> reportingTaskNodes = new HashSet<>();
        reportingTaskNodes.add(reportingTaskNode);
        when(mockFlowController.getAllReportingTasks()).thenReturn(reportingTaskNodes);

        if (validationErrors) {
            addValidationErrors(reportingTaskNode);
        } else {
            when(reportingTaskNode.getValidationErrors()).thenReturn(Collections.emptyList());
        }
    }

    /** Registers a single connection status with fixed queue and throughput figures. */
    private void populateConnection() {
        ConnectionStatus connectionStatus = new ConnectionStatus();
        connectionStatus.setQueuedBytes(100);
        connectionStatus.setId("connectionId");
        connectionStatus.setName("connectionName");
        connectionStatus.setQueuedCount(10);
        connectionStatus.setInputCount(1);
        connectionStatus.setInputBytes(2);
        connectionStatus.setOutputCount(3);
        connectionStatus.setOutputBytes(4);

        Collection<ConnectionStatus> statusCollection = new ArrayList<>();
        statusCollection.add(connectionStatus);
        when(rootGroupStatus.getConnectionStatus()).thenReturn(statusCollection);
    }

    /**
     * Registers a single UpdateAttribute processor status on the root group,
     * optionally with validation errors and bulletins.
     */
    private void populateProcessor(boolean validationErrors, boolean addBulletins) {
        if (addBulletins) {
            addBulletins("Bulletin message", "UpdateAttributeProcessorId");
        }

        ProcessorStatus processorStatus = new ProcessorStatus();
        processorStatus.setType("org.apache.nifi.processors.attributes.UpdateAttribute");
        processorStatus.setId("UpdateAttributeProcessorId");
        processorStatus.setName("UpdateAttributeProcessorName");
        processorStatus.setRunStatus(RunStatus.Stopped);
        processorStatus.setActiveThreadCount(1);
        processorStatus.setFlowFilesReceived(2);
        processorStatus.setBytesRead(3);
        processorStatus.setBytesWritten(4);
        processorStatus.setFlowFilesSent(5);
        processorStatus.setInvocations(6);
        processorStatus.setProcessingNanos(7);

        Collection<ProcessorStatus> statusCollection = new ArrayList<>();
        statusCollection.add(processorStatus);
        when(rootGroupStatus.getProcessorStatus()).thenReturn(statusCollection);

        ProcessorNode processorNode = mock(ProcessorNode.class);
        when(processGroup.getProcessor(processorStatus.getId())).thenReturn(processorNode);
        if (validationErrors) {
            addValidationErrors(processorNode);
        } else {
            when(processorNode.getValidationErrors()).thenReturn(Collections.emptyList());
        }
    }

    /**
     * Registers a remote process group with one input and one output port,
     * optionally with bulletins. {@code addAuthIssues} is accepted for call-site
     * symmetry but has no effect on the populated status.
     */
    private void populateRemoteProcessGroup(boolean addBulletins, boolean addAuthIssues) {
        RemoteProcessGroup remoteProcessGroup = mock(RemoteProcessGroup.class);
        when(processGroup.getRemoteProcessGroup(any())).thenReturn(remoteProcessGroup);

        RemoteGroupPort inputPort = mock(RemoteGroupPort.class);
        when(inputPort.getName()).thenReturn("inputPort");
        when(inputPort.getTargetExists()).thenReturn(true);
        when(inputPort.isTargetRunning()).thenReturn(false);
        when(remoteProcessGroup.getInputPorts()).thenReturn(Collections.singleton(inputPort));

        RemoteGroupPort outputPort = mock(RemoteGroupPort.class);
        when(outputPort.getName()).thenReturn("outputPort");
        when(outputPort.getTargetExists()).thenReturn(true);
        when(outputPort.isTargetRunning()).thenReturn(false);
        when(remoteProcessGroup.getOutputPorts()).thenReturn(Collections.singleton(outputPort));

        RemoteProcessGroupStatus remoteProcessGroupStatus = new RemoteProcessGroupStatus();
        addRemoteProcessGroupStatus(remoteProcessGroupStatus);
        if (addBulletins) {
            addBulletins("Bulletin message", remoteProcessGroupStatus.getId());
        }
        when(rootGroupStatus.getRemoteProcessGroupStatus()).thenReturn(Collections.singletonList(remoteProcessGroupStatus));
    }

    /** Stubs the fixed instance-level statistics on the root group status. */
    private void setRootGroupStatusVariables() {
        when(rootGroupStatus.getQueuedContentSize()).thenReturn(1L);
        when(rootGroupStatus.getQueuedCount()).thenReturn(2);
        when(rootGroupStatus.getActiveThreadCount()).thenReturn(3);
        when(rootGroupStatus.getBytesRead()).thenReturn(1L);
        when(rootGroupStatus.getBytesWritten()).thenReturn(2L);
        when(rootGroupStatus.getBytesSent()).thenReturn(3L);
        when(rootGroupStatus.getFlowFilesSent()).thenReturn(4);
        when(rootGroupStatus.getBytesTransferred()).thenReturn(5L);
        when(rootGroupStatus.getFlowFilesTransferred()).thenReturn(6);
        when(rootGroupStatus.getBytesReceived()).thenReturn(7L);
        when(rootGroupStatus.getFlowFilesReceived()).thenReturn(8);
    }

    /** Adds one named garbage-collection entry to the diagnostics. */
    private void addGarbageCollectionToSystemDiagnostics(SystemDiagnostics systemDiagnostics) {
        GarbageCollection garbageCollection1 = new GarbageCollection();
        garbageCollection1.setCollectionCount(1);
        garbageCollection1.setCollectionTime(10);
        garbageCollection1.setName("garbage 1");

        Map<String, GarbageCollection> garbageCollectionMap = new HashMap<>();
        garbageCollectionMap.put(garbageCollection1.getName(), garbageCollection1);
        systemDiagnostics.setGarbageCollection(garbageCollectionMap);
    }

    /** Adds one content repository usage entry to the diagnostics. */
    private void addContentRepoToSystemDiagnostics(SystemDiagnostics systemDiagnostics) {
        StorageUsage repoUsage1 = new StorageUsage();
        repoUsage1.setFreeSpace(30);
        repoUsage1.setTotalSpace(100);
        repoUsage1.setIdentifier("Content repo1");

        Map<String, StorageUsage> stringStorageUsageMap = new HashMap<>();
        stringStorageUsageMap.put(repoUsage1.getIdentifier(), repoUsage1);
        systemDiagnostics.setContentRepositoryStorageUsage(stringStorageUsageMap);
    }

    /** Adds the FlowFile repository usage entry to the diagnostics. */
    private void addFlowFileRepoToSystemDiagnostics(SystemDiagnostics systemDiagnostics) {
        StorageUsage repoUsage = new StorageUsage();
        repoUsage.setFreeSpace(30);
        repoUsage.setTotalSpace(100);
        repoUsage.setIdentifier("FlowFile repo");
        systemDiagnostics.setFlowFileRepositoryStorageUsage(repoUsage);
    }

    /** Sets the fixed heap and non-heap figures on the diagnostics. */
    private void addHeapSystemDiagnostics(SystemDiagnostics systemDiagnostics) {
        systemDiagnostics.setMaxHeap(5);
        systemDiagnostics.setTotalHeap(3);
        systemDiagnostics.setUsedHeap(2);
        systemDiagnostics.setMaxNonHeap(9);
        systemDiagnostics.setTotalNonHeap(8);
        systemDiagnostics.setUsedNonHeap(6);
    }

    /** Sets the fixed processor load/count figures on the diagnostics. */
    private void addProcessorInfoToSystemDiagnostics(SystemDiagnostics systemDiagnostics) {
        systemDiagnostics.setProcessorLoadAverage(80.9);
        systemDiagnostics.setAvailableProcessors(5);
    }

    /** Stubs a healthy, enabled controller service with no validation errors. */
    private void addControllerServiceHealth(ControllerServiceNode controllerServiceNode) {
        when(controllerServiceNode.getName()).thenReturn("mockControllerService");
        when(controllerServiceNode.getIdentifier()).thenReturn("mockControllerService");
        when(controllerServiceNode.getState()).thenReturn(ControllerServiceState.ENABLED);
        when(controllerServiceNode.getValidationErrors()).thenReturn(Collections.emptyList());
    }

    /** Stubs a running "ReportProvenance" reporting task with no validation errors. */
    private void addReportingTaskNodeVariables(ReportingTaskNode reportingTaskNode) {
        when(reportingTaskNode.getValidationErrors()).thenReturn(Collections.emptyList());
        when(reportingTaskNode.getActiveThreadCount()).thenReturn(1);
        when(reportingTaskNode.getScheduledState()).thenReturn(ScheduledState.RUNNING);
        when(reportingTaskNode.getIdentifier()).thenReturn("ReportProvenance");
        when(reportingTaskNode.getName()).thenReturn("ReportProvenance");
    }

    /** Fills the remote process group status "rpg1" with fixed transmission figures. */
    private void addRemoteProcessGroupStatus(RemoteProcessGroupStatus remoteProcessGroupStatus) {
        remoteProcessGroupStatus.setName("rpg1");
        remoteProcessGroupStatus.setId("rpg1");
        remoteProcessGroupStatus.setTransmissionStatus(TransmissionStatus.Transmitting);
        remoteProcessGroupStatus.setActiveRemotePortCount(1);
        remoteProcessGroupStatus.setInactiveRemotePortCount(2);
        remoteProcessGroupStatus.setActiveThreadCount(3);
        remoteProcessGroupStatus.setSentContentSize(4L);
        remoteProcessGroupStatus.setSentCount(5);
    }

    /**
     * Stubs the bulletin repository so a query whose source id pattern matches
     * {@code sourceId} returns a single bulletin with the given message.
     */
    private void addBulletins(String message, String sourceId) {
        List<Bulletin> bulletinList = new ArrayList<>();
        bulletinList.add(mockBulletin(message));
        when(bulletinRepo.findBulletins(any())).then(new BulletinQueryAnswer(sourceId, bulletinList));
    }

    /** Stubs two fixed validation errors on the given component. */
    private void addValidationErrors(ComponentNode connectable) {
        List<ValidationResult> validationResultList = new ArrayList<>();
        validationResultList.add(new ValidationResult.Builder()
                .input("input")
                .subject("subject")
                .explanation("is not valid")
                .build());
        validationResultList.add(new ValidationResult.Builder()
                .input("input2")
                .subject("subject2")
                .explanation("is not valid too")
                .build());
        when(connectable.getValidationErrors()).thenReturn(validationResultList);
    }

    /**
     * Mockito {@link Answer} that returns the configured bulletins only when the
     * query's source id pattern matches the expected id, mimicking the repository's
     * per-component filtering.
     */
    private static class BulletinQueryAnswer implements Answer<Object> {
        private final String idToMatch;
        private final List<Bulletin> bulletinList;

        private BulletinQueryAnswer(String idToMatch, List<Bulletin> bulletinList) {
            this.idToMatch = idToMatch;
            this.bulletinList = bulletinList;
        }

        @Override
        public Object answer(InvocationOnMock invocationOnMock) throws Throwable {
            BulletinQuery bulletinQuery = (BulletinQuery) invocationOnMock.getArguments()[0];
            if (idToMatch.equals(bulletinQuery.getSourceIdPattern().toString())) {
                return bulletinList;
            }
            return Collections.emptyList();
        }
    }
}
|
apache/sis | 38,037 | endorsed/src/org.apache.sis.metadata/main/org/apache/sis/util/iso/Types.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.sis.util.iso;
import java.util.Map;
import java.util.SortedMap;
import java.util.Locale;
import java.util.Properties;
import java.util.ResourceBundle;
import java.util.MissingResourceException;
import java.util.IllformedLocaleException;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.logging.LogRecord;
import java.util.function.Function;
import java.io.IOException;
import org.opengis.annotation.UML;
import org.opengis.util.CodeList;
import org.opengis.util.InternationalString;
import org.apache.sis.util.SimpleInternationalString;
import org.apache.sis.util.DefaultInternationalString;
import org.apache.sis.util.ResourceInternationalString;
import org.apache.sis.util.Locales;
import org.apache.sis.util.CharSequences;
import org.apache.sis.util.ArgumentChecks;
import org.apache.sis.util.OptionalCandidate;
import org.apache.sis.util.logging.Logging;
import org.apache.sis.util.resources.Errors;
import org.apache.sis.util.resources.Messages;
import org.apache.sis.util.collection.BackingStoreException;
import org.apache.sis.util.internal.shared.CodeLists;
import org.apache.sis.util.internal.shared.Strings;
import org.apache.sis.pending.jdk.JDK19;
import org.apache.sis.system.Modules;
// Specific to the main branch:
import java.io.InputStream;
/**
* Static methods working on GeoAPI types and {@link CodeList} values.
* This class provides:
*
* <ul>
* <li>Methods for fetching the ISO name or description of a code list:<ul>
* <li>{@link #getStandardName(Class)} for ISO name</li>
* <li>{@link #getListName(CodeList)} for ISO name</li>
* <li>{@link #getDescription(Class)} for a description</li>
* </ul></li>
* <li>Methods for fetching the ISO name or description of a code value:<ul>
* <li>{@link #getCodeName(CodeList)} for ISO name,</li>
* <li>{@link #getCodeTitle(CodeList)} for a label or title</li>
* <li>{@link #getDescription(CodeList)} for a more verbose description</li>
* </ul></li>
* <li>Methods for fetching an instance from a name (converse of above {@code get} methods):<ul>
* <li>{@link #forCodeName(Class, String, Function)}</li>
* <li>{@link #forEnumName(Class, String)}</li>
* </ul></li>
* </ul>
*
* <h2>Substituting a free text by a code list</h2>
* The ISO standard allows to substitute some character strings in the <q>free text</q> domain
* by a {@link CodeList} value. Such substitution can be done with:
*
* <ul>
* <li>{@link #getCodeTitle(CodeList)} for getting the {@link InternationalString} instance
* to store in a metadata property.</li>
* <li>{@link #forCodeTitle(CharSequence)} for retrieving the {@link CodeList} previously stored as an
* {@code InternationalString}.</li>
* </ul>
*
* <h2>Example</h2>
* In the following XML fragment, the {@code <mac:type>} value is normally a {@code <gco:CharacterString>}
* but has been replaced by a {@code SensorType} code below:
*
* {@snippet lang="xml" :
* <mac:MI_Instrument>
* <mac:type>
* <gmi:MI_SensorTypeCode
* codeList="http://standards.iso.org/…snip…/codelists.xml#CI_SensorTypeCode"
* codeListValue="RADIOMETER">Radiometer</gmi:MI_SensorTypeCode>
* </mac:type>
* </mac:MI_Instrument>
* }
*
* @author Martin Desruisseaux (IRD, Geomatys)
* @version 1.5
* @since 0.3
*/
public final class Types {
/**
* The separator character between class name and attribute name in resource files.
*/
private static final char SEPARATOR = '.';
/**
* The logger for metadata.
*/
private static final Logger LOGGER = Logger.getLogger(Modules.METADATA);
/**
* The types for ISO 19115 UML identifiers. The keys are UML identifiers.
* Values are either class names as {@link String} objects, or the {@link Class} instances.
* This map will be built only when first needed.
*
* @see #forStandardName(String)
*/
private static Map<String,Object> typeForNames;
/**
* Do not allow instantiation of this class.
*/
private Types() {
}
/**
* Returns the ISO name for the given class, or {@code null} if none.
* This method can be used for GeoAPI interfaces or {@link CodeList}.
*
* <h4>Examples</h4>
* <ul>
* <li><code>getStandardName({@linkplain org.opengis.metadata.citation.Citation}.class)</code>
* (an interface) returns {@code "CI_Citation"}.</li>
* <li><code>getStandardName({@linkplain org.opengis.referencing.cs.AxisDirection}.class)</code>
* (a code list) returns {@code "CS_AxisDirection"}.</li>
* </ul>
*
* <h4>Implementation note</h4>
* This method looks for the {@link UML} annotation on the given type. It does not search for
* parent classes or interfaces if the given type is not directly annotated (i.e. {@code @UML}
* annotations are not inherited). If no annotation is found, then this method does not fallback
* on the Java name since, as the name implies, this method is about standard names.
*
* @param type the GeoAPI interface or code list from which to get the ISO name, or {@code null}.
* @return the ISO name for the given type, or {@code null} if none or if the given type is {@code null}.
*
* @see #forStandardName(String)
*/
@OptionalCandidate
public static String getStandardName(final Class<?> type) {
    if (type == null) {
        return null;
    }
    final UML uml = type.getAnnotation(UML.class);
    if (uml == null) {
        return null;            // Not annotated: no fallback on the Java name (this method is about standard names).
    }
    final String id = uml.identifier();
    if (id == null || id.isEmpty()) {
        return null;
    }
    /*
     * Annotation strings do not seem to be interned by the JVM (observed as of JDK7).
     * Keep this explicit String.intern() call in sync with the workaround comment in
     * the org.apache.sis.metadata.PropertyAccessor.name(…) method.
     */
    return id.intern();
}
/**
* Returns the ISO classname (if available) or the Java classname (as a fallback) of the given
* enumeration or code list value. This method uses the {@link UML} annotation if it exists, or
* fallback on the {@linkplain Class#getSimpleName() simple class name} otherwise.
*
* <h4>Examples</h4>
* <ul>
* <li>{@code getListName(ParameterDirection.IN_OUT)} returns {@code "SV_ParameterDirection"}.</li>
* <li>{@code getListName(AxisDirection.NORTH)} returns {@code "CS_AxisDirection"}.</li>
* <li>{@code getListName(TopicCategory.INLAND_WATERS)} returns {@code "MD_TopicCategoryCode"}.</li>
* <li>{@code getListName(ImagingCondition.BLURRED_IMAGE)} returns {@code "MD_ImagingConditionCode"}.</li>
* </ul>
*
* @param code the code for which to get the class name, or {@code null}.
* @return the ISO (preferred) or Java (fallback) class name, or {@code null} if the given code is null.
*/
public static String getListName(final CodeList<?> code) {
    if (code == null) {
        return null;
    }
    final Class<?> type = code.getClass();
    final String iso = getStandardName(type);
    if (iso != null) {
        return iso;                     // ISO identifier from the @UML annotation.
    }
    return type.getSimpleName();        // Fallback on the Java class name.
}
/**
* Returns the ISO name (if available) or the Java name (as a fallback) of the given enumeration or code list
* value. If the value has no {@link UML} identifier, then the programmatic name is used as a fallback.
*
* <h4>Examples</h4>
* <ul>
* <li>{@code getCodeName(ParameterDirection.IN_OUT)} returns {@code "in/out"}.</li>
* <li>{@code getCodeName(AxisDirection.NORTH)} returns {@code "north"}.</li>
* <li>{@code getCodeName(TopicCategory.INLAND_WATERS)} returns {@code "inlandWaters"}.</li>
* <li>{@code getCodeName(ImagingCondition.BLURRED_IMAGE)} returns {@code "blurredImage"}.</li>
* </ul>
*
* @param code the code for which to get the name, or {@code null}.
* @return the UML identifiers or programmatic name for the given code, or {@code null} if the given code is null.
*
* @see #getCodeLabel(CodeList)
* @see #getCodeTitle(CodeList)
* @see #getDescription(CodeList)
* @see #forCodeName(Class, String, Function)
*/
public static String getCodeName(final CodeList<?> code) {
    if (code == null) {
        return null;
    }
    // The parameter is already declared as CodeList, so the UML identifier (if any) can be
    // queried directly. The previous `instanceof CodeList` check was always true and the
    // accompanying cast was redundant; both have been removed without change of behavior.
    String id = code.identifier();
    if (id == null) {
        // No UML identifier: fallback on the first name which is not the Java field name.
        id = code.name();
        for (final String name : code.names()) {
            if (!name.equals(id)) {
                id = name;
                break;
            }
        }
    }
    // An empty identifier is treated like a missing one: use the programmatic name.
    return id.isEmpty() ? code.name() : id;
}
/**
* Returns a unlocalized title for the given enumeration or code list value.
* This method builds a title using heuristics rules, which should give reasonable
* results without the need of resource bundles. For better results, consider using
* {@link #getCodeTitle(CodeList)} instead.
*
* <p>The current heuristic implementation iterates over {@linkplain CodeList#names() all code names},
* selects the longest one excluding the {@linkplain CodeList#name() field name} if possible, then
* {@linkplain CharSequences#camelCaseToSentence(CharSequence) makes a sentence} from that name.</p>
*
* <h4>Examples</h4>
* <ul>
* <li>{@code getCodeLabel(AxisDirection.NORTH)} returns {@code "North"}.</li>
* <li>{@code getCodeLabel(TopicCategory.INLAND_WATERS)} returns {@code "Inland waters"}.</li>
* <li>{@code getCodeLabel(ImagingCondition.BLURRED_IMAGE)} returns {@code "Blurred image"}.</li>
* </ul>
*
* @param code the code from which to get a title, or {@code null}.
* @return a unlocalized title for the given code, or {@code null} if the given code is null.
*
* @see #getCodeName(CodeList)
* @see #getCodeTitle(CodeList)
* @see #getDescription(CodeList)
*/
public static String getCodeLabel(final CodeList<?> code) {
    if (code == null) {
        return null;
    }
    final String fieldName = code.name();
    // Start with the UML identifier (or programmatic name as a fallback), then prefer
    // the longest alternative name which is not the Java field name.
    String longest = getCodeName(code);
    for (final String candidate : code.names()) {
        if (!candidate.equals(fieldName) && candidate.length() >= longest.length()) {
            longest = candidate;
        }
    }
    return CharSequences.camelCaseToSentence(longest).toString();
}
/**
* Returns the title of the given enumeration or code list value. Title are usually much shorter than descriptions.
* English titles are often the same as the {@linkplain #getCodeLabel(CodeList) code labels}.
*
* <p>The code or enumeration value given in argument to this method can be retrieved from the returned title
* with the {@link #forCodeTitle(CharSequence)} method. See <cite>Substituting a free text by a code list</cite>
* in this class javadoc for more information.</p>
*
* @param code the code for which to get the title, or {@code null}.
* @return the title, or {@code null} if the given code is null.
*
* @see #getDescription(CodeList)
* @see #forCodeTitle(CharSequence)
*/
public static InternationalString getCodeTitle(final CodeList<?> code) {
    if (code == null) {
        return null;
    }
    // The returned wrapper keeps a reference to `code`, so forCodeTitle(…) can get it back.
    return new CodeTitle(code);
}
/**
* Returns the description of the given enumeration or code list value, or {@code null} if none.
* For a description of the code list as a whole instead of a particular code,
* see {@link Types#getDescription(Class)}.
*
* @param code the code for which to get the localized description, or {@code null}.
* @return the description, or {@code null} if none or if the given code is null.
*
* @see #getCodeTitle(CodeList)
* @see #getDescription(Class)
*/
@OptionalCandidate
public static InternationalString getDescription(final CodeList<?> code) {
    if (code == null || !hasResources(code.getClass())) {
        return null;
    }
    return new Description(Description.resourceKey(code));
}
/**
* Returns a description for the given class, or {@code null} if none.
* This method can be used for GeoAPI interfaces or {@link CodeList}.
*
* @param type the GeoAPI interface or code list from which to get the description, or {@code null}.
* @return the description, or {@code null} if none or if the given type is {@code null}.
*
* @see #getDescription(CodeList)
*/
@OptionalCandidate
public static InternationalString getDescription(final Class<?> type) {
    final String name = getStandardName(type);
    // A description exists only for types having both a UML identifier and GeoAPI resources.
    return (name != null && hasResources(type)) ? new Description(name) : null;
}
/**
* Returns a description for the given property, or {@code null} if none.
* The given type shall be a GeoAPI interface, and the given property shall
* be a UML identifier. If any of the input argument is {@code null}, then
* this method returns {@code null}.
*
* @param type the GeoAPI interface from which to get the description of a property, or {@code null}.
* @param property the ISO name of the property for which to get the description, or {@code null}.
* @return the description, or {@code null} if none or if the given type or property name is {@code null}.
*/
public static InternationalString getDescription(final Class<?> type, final String property) {
    if (property == null) {
        return null;
    }
    final String name = getStandardName(type);
    if (name == null || !hasResources(type)) {
        return null;
    }
    // Resource keys for properties have the form "TypeName.propertyName".
    return new Description(name + SEPARATOR + property);
}
/**
* The {@link InternationalString} returned by the {@code Types.getDescription(…)} methods.
*
* @author Martin Desruisseaux (Geomatys)
*/
private static class Description extends ResourceInternationalString {
    /**
     * For cross-version compatibility.
     */
    private static final long serialVersionUID = -6202647167398898834L;

    /**
     * The class loader to use for fetching GeoAPI resources.
     */
    private static final ClassLoader CLASSLOADER = Types.class.getClassLoader();

    /**
     * Creates a new international string from the specified resource bundle and key.
     *
     * @param key the key for the resource to fetch.
     */
    Description(final String key) {
        super(key);
    }

    /**
     * Loads the GeoAPI resources. A cache is managed by {@link ResourceBundle}.
     */
    @Override
    protected ResourceBundle getBundle(final Locale locale) {
        return ResourceBundle.getBundle("org.opengis.metadata.Descriptions", locale, CLASSLOADER);
    }

    /**
     * Returns the description for the given locale, or fallback on a default description
     * if no resources exist for that locale. A missing resource is treated as a recoverable
     * condition: it is logged at a low level, then {@link #fallback()} is returned.
     */
    @Override
    public final String toString(final Locale locale) {
        try {
            return super.toString(locale);
        } catch (MissingResourceException e) {
            Logging.ignorableException(Messages.LOGGER, ResourceInternationalString.class, "toString", e);
            return fallback();
        }
    }

    /**
     * Returns a fallback if no resource is found. The fallback is built heuristically by
     * converting the last component of the key (the part after the last {@value Types#SEPARATOR})
     * from camel-case to a sentence.
     */
    String fallback() {
        return CharSequences.camelCaseToSentence(key.substring(key.lastIndexOf(SEPARATOR) + 1)).toString();
    }

    /**
     * Returns the resource key for the given code list value.
     * The key has the form "ListName.codeName" (e.g. {@code "CS_AxisDirection.north"})
     * unless the code name already contains a separator character.
     */
    static String resourceKey(final CodeList<?> code) {
        String key = getCodeName(code);
        if (key.indexOf(SEPARATOR) < 0) {
            key = getListName(code) + SEPARATOR + key;
        }
        return key;
    }
}
/**
* The {@link InternationalString} returned by the {@code Types.getCodeTitle(…)} method.
* The code below is a duplicated - in a different way - of {@code CodeListUID(CodeList)} constructor.
* This duplication exists because {@code CodeListUID} constructor stores more information in an opportunist way.
* If this class is updated, please update {@code CodeListUID(CodeList)} accordingly.
*
* @author Martin Desruisseaux (Geomatys)
*/
private static final class CodeTitle extends Description {
    /**
     * For cross-version compatibility.
     */
    private static final long serialVersionUID = 3306532357801489365L;

    /**
     * The code list for which to create a title.
     * Kept as a field so that {@link Types#forCodeTitle(CharSequence)} can
     * retrieve the original code from the title instance.
     */
    final CodeList<?> code;

    /**
     * Creates a new international string for the given code list element.
     *
     * @param code the code list for which to create a title.
     */
    CodeTitle(final CodeList<?> code) {
        super(resourceKey(code));
        this.code = code;
    }

    /**
     * Loads the GeoAPI resources. A cache is managed by {@link ResourceBundle}.
     */
    @Override
    protected ResourceBundle getBundle(final Locale locale) {
        return ResourceBundle.getBundle(CodeLists.RESOURCES, locale);
    }

    /**
     * Returns a fallback if no resource is found: a label built heuristically
     * from the code names instead of the resource key.
     */
    @Override
    String fallback() {
        return getCodeLabel(code);
    }
}
/**
* Returns whether the specified class is expected to have GeoAPI resources.
*
* @param type the type to test.
* @return whether the given class is expected to have resources.
*/
private static boolean hasResources(final Class<?> type) {
    // Resources are bundled only for types in the GeoAPI metadata packages.
    final String classname = type.getName();
    return classname.startsWith("org.opengis.metadata.");
}
/**
* Returns the Java type (usually a GeoAPI interface) for the given ISO name, or {@code null} if none.
* The identifier argument shall be the value documented in the {@link UML#identifier()} annotation on
* the Java type.
*
* <h4>Examples</h4>
* <ul>
* <li>{@code forStandardName("CI_Citation")} returns <code>{@linkplain org.opengis.metadata.citation.Citation}.class</code></li>
* <li>{@code forStandardName("CS_AxisDirection")} returns <code>{@linkplain org.opengis.referencing.cs.AxisDirection}.class</code></li>
* </ul>
*
* <h4>Implementation note</h4>
* The package prefix (e.g. {@code "CI_"} in {@code "CI_Citation"}) can be omitted.
* The flexibility is provided for allowing transition to newer ISO standards,
* which are dropping the package prefixes.
* For example, {@code "CS_AxisDirection"} in ISO 19111:2007
* has been renamed {@code "AxisDirection"} in ISO 19111:2019.
*
* <p>Only identifiers for the stable part of GeoAPI or for some Apache SIS classes are recognized.
* This method does not handle the identifiers for interfaces in the {@code geoapi-pending} module.</p>
*
* <h4>Future evolution</h4>
* When a new ISO type does not yet have a corresponding GeoAPI interface,
* this method may temporarily return an Apache SIS class instead, until a future version can use the interface.
* For example, {@code forStandardName("CI_Individual")} returns
* <code>{@linkplain org.apache.sis.metadata.iso.citation.DefaultIndividual}.class</code> in Apache SIS versions
* that depend on GeoAPI 3.0, but the return type may be changed to {@code Individual.class} when Apache SIS will
* be upgraded to GeoAPI 3.1.
*
* @param identifier the ISO {@linkplain UML} identifier, or {@code null}.
* @return the GeoAPI interface, or {@code null} if the given identifier is {@code null} or unknown.
*/
public static synchronized Class<?> forStandardName(final String identifier) {
    if (identifier == null) {
        return null;
    }
    if (typeForNames == null) {
        final Class<Types> c = Types.class;
        final Properties props = new Properties();
        /*
         * Use try-with-resources for ensuring that the stream is closed even if `load(…)`
         * fails with an I/O error (the previous code leaked the stream in that case).
         */
        try (InputStream in = c.getResourceAsStream("class-index.properties")) {
            if (in == null) {
                throw new MissingResourceException("class-index.properties", c.getName(), identifier);
            }
            props.load(in);
        } catch (IOException e) {
            throw new BackingStoreException(e);
        }
        /*
         * Copy all map entries from Properties to a new HashMap for avoiding Properties synchronization.
         * Also use internized strings because those Strings are programmatic names or annotation values
         * which are expected to be internized anyway when the corresponding classes are loaded.
         */
        typeForNames = JDK19.newHashMap(2 * props.size());
        for (final Map.Entry<Object,Object> e : props.entrySet()) {
            final String key = ((String) e.getKey()).intern();
            final String value = ((String) e.getValue()).intern();
            typeForNames.put(key, value);
            // Heuristic rule for omitting the prefix (e.g. "CI_" in "CI_Citation").
            if (key.length() > 3 && key.charAt(2) == '_' && Character.isUpperCase(key.charAt(1))) {
                typeForNames.putIfAbsent(key.substring(3).intern(), value);
            }
        }
        // Following code list is not defined in ISO 19115-2 but appears in XML schemas.
        typeForNames.putIfAbsent("MI_SensorTypeCode", "org.apache.sis.xml.bind.metadata.replace.SensorType");
    }
    /*
     * Get the interface class for the given identifier, loading the class when first needed.
     * After a successful load, the String entry is replaced by the Class instance in the map,
     * so subsequent calls return directly.
     */
    final Object value = typeForNames.get(identifier);
    if (value == null || value instanceof Class<?>) {
        return (Class<?>) value;
    }
    final Class<?> type;
    try {
        type = Class.forName((String) value);
    } catch (ClassNotFoundException e) {
        throw new TypeNotPresentException((String) value, e);
    }
    typeForNames.put(identifier.intern(), type);
    return type;
}
/**
* Returns the enumeration value of the given type that matches the given name, or {@code null} if none.
* This method is similar to the standard {@code Enum.valueOf(…)} method, except that this method is more
* tolerant on string comparisons:
*
* <ul>
* <li>Name comparisons are case-insensitive.</li>
* <li>Only {@linkplain Character#isLetterOrDigit(int) letter and digit} characters are compared.
* Spaces and punctuation characters like {@code '_'} and {@code '-'} are ignored.</li>
* </ul>
*
* If there is no match, this method returns {@code null} — it does not thrown an exception,
* unless the given class is not an enumeration.
*
* @param <T> the compile-time type given as the {@code enumType} parameter.
* @param enumType the type of enumeration.
* @param name the name of the enumeration value to obtain, or {@code null}.
* @return a value matching the given name, or {@code null} if the name is null
* or if no matching enumeration is found.
*
* @see Enum#valueOf(Class, String)
*
* @since 0.5
*/
@OptionalCandidate
public static <T extends Enum<T>> T forEnumName(final Class<T> enumType, String name) {
    T value;
    try {
        value = CodeLists.forEnumName(enumType, name);
    } catch (IllegalArgumentException e) {
        value = null;       // Contract of this method is to return null rather than throwing.
    }
    return value;
}
/**
* Returns the code of the given type that matches the given name, or optionally returns a new one if none
* match the name. This method performs the same work as the GeoAPI {@code CodeList.valueOf(…)} method,
* except that this method is more tolerant on string comparisons when looking for an existing code:
*
* <ul>
* <li>Name comparisons are case-insensitive.</li>
* <li>Only {@linkplain Character#isLetterOrDigit(int) letter and digit} characters are compared.
* Spaces and punctuation characters like {@code '_'} and {@code '-'} are ignored.</li>
* </ul>
*
* If no match is found, then a new code is created only if the {@code creator} argument is non-null.
* That argument should be a lambda function to the {@code valueOf(String)} method of the code list class.
* Example:
*
* {@snippet lang="java" :
* AxisDirection dir = Types.forCodeName(AxisDirection.class, name, AxisDirection::valueOf);
* }
*
* If the {@code constructor} is null and no existing code matches the given name,
* then this method returns {@code null}.
*
* @param <T> the compile-time type given as the {@code codeType} parameter.
* @param codeType the type of code list.
* @param name the name of the code to obtain, or {@code null}.
* @param constructor the constructor to use if a new code needs to be created,
* or {@code null} for not creating any new code.
* @return a code matching the given name, or {@code null} if the name is null
* or if no matching code is found and {@code constructor} is {@code null}.
*
* @see #getCodeName(ControlledVocabulary)
* @see CodeList#valueOf(Class, String, Function)
*
* @since 1.5
*/
@OptionalCandidate
public static <T extends CodeList<T>> T forCodeName(final Class<T> codeType, String name,
        final Function<? super String, ? extends T> constructor)
{
    name = Strings.trimOrNull(name);
    if (name == null) {
        return null;                    // Avoid initialization of the <T> class.
    }
    final T existing = CodeLists.forCodeName(codeType, name);
    if (existing != null) {
        return existing;
    }
    // No existing code matches: create a new one only if the caller provided a constructor.
    return (constructor != null) ? constructor.apply(name) : null;
}
/**
* Returns the code list or enumeration value for the given title, or {@code null} if none.
* The current implementation performs the following choice:
*
* <ul>
* <li>If the given title is a value returned by a previous call to {@link #getCodeTitle(CodeList)},
* returns the code or enumeration value used for creating that title.</li>
* <li>Otherwise returns {@code null}.</li>
* </ul>
*
* @param title the title for which to get a code or enumeration value, or {@code null}.
* @return the code or enumeration value associated with the given title, or {@code null}.
*
* @see #getCodeTitle(CodeList)
*
* @since 0.7
*/
public static CodeList<?> forCodeTitle(final CharSequence title) {
    // Only titles created by getCodeTitle(…) carry the original code; anything else maps to null.
    if (title instanceof CodeTitle) {
        return ((CodeTitle) title).code;
    }
    return null;
}
/**
* Returns an international string for the values in the given properties map, or {@code null} if none.
* This method is used when a property in a {@link java.util.Map} may have many localized variants.
* For example, the given map may contains a {@code "remarks"} property defined by values associated to
* the {@code "remarks_en"} and {@code "remarks_fr"} keys, for English and French locales respectively.
*
* <p>If the given map is {@code null}, then this method returns {@code null}.
* Otherwise this method iterates over the entries having a key that starts with the specified prefix,
* followed by the {@code '_'} character. For each such key:</p>
*
* <ul>
* <li>If the key is exactly equals to {@code prefix}, selects {@link Locale#ROOT}.</li>
* <li>Otherwise the characters after {@code '_'} are parsed as an ISO language and country code
* by the {@link Locales#parse(String, int)} method. Note that 3-letters codes are replaced
* by their 2-letters counterparts on a <em>best effort</em> basis.</li>
* <li>The value for the decoded locale is added in the international string to be returned.</li>
* </ul>
*
* @param properties the map from which to get the string values for an international string, or {@code null}.
* @param prefix the prefix of keys to use for creating the international string.
* @return the international string, or {@code null} if the given map is null or does not contain values
* associated to keys starting with the given prefix.
* @throws IllegalArgumentException if a key starts by the given prefix and:
* <ul>
* <li>The key suffix is an illegal {@link Locale} code,</li>
* <li>or the value associated to that key is a not a {@link CharSequence}.</li>
* </ul>
*
* @see Locales#parse(String, int)
* @see DefaultInternationalString#DefaultInternationalString(Map)
*
* @since 0.4
*/
@OptionalCandidate
public static InternationalString toInternationalString(Map<String,?> properties, final String prefix)
        throws IllegalArgumentException
{
    ArgumentChecks.ensureNonEmpty("prefix", prefix);
    if (properties == null) {
        return null;
    }
    /*
     * If the given map is an instance of SortedMap using the natural ordering of keys,
     * we can skip all keys that lexicographically precedes the given prefix. In that
     * case we can also stop the iteration early (see `isSorted` usages below).
     */
    boolean isSorted = false;
    if (properties instanceof SortedMap<?,?>) {
        final SortedMap<String,?> sorted = (SortedMap<String,?>) properties;
        if (sorted.comparator() == null) {                  // We want natural ordering.
            properties = sorted.tailMap(prefix);
            isSorted = true;
        }
    }
    /*
     * Now iterates over the map entry and lazily create the InternationalString
     * only when first needed. In most cases, we have 0 or 1 matching entry.
     * State machine: `i18n` holds the single value found so far (with its locale in
     * `firstLocale`); on the second match it is promoted to a DefaultInternationalString.
     */
    CharSequence i18n = null;
    Locale firstLocale = null;
    DefaultInternationalString dis = null;
    final int offset = prefix.length();
    for (final Map.Entry<String,?> entry : properties.entrySet()) {
        final String key = entry.getKey();
        if (key == null) {
            continue;           // Tolerance for Map that accept null keys.
        }
        if (!key.startsWith(prefix)) {
            if (isSorted) break;    // If the map is sorted, there is no need to check next entries.
            continue;
        }
        final Locale locale;
        if (key.length() == offset) {
            locale = Locale.ROOT;       // Key is exactly the prefix: value for the root locale.
        } else {
            final char c = key.charAt(offset);
            if (c != '_') {
                // Key merely starts with the prefix (e.g. "remarksFoo"): not a localized variant.
                if (isSorted && c > '_') break;
                continue;
            }
            // Parse the part after '_' as a language (and optional country) code.
            final int s = offset + 1;
            try {
                locale = Locales.parse(key, s);
            } catch (IllformedLocaleException e) {
                throw new IllegalArgumentException(Errors.forProperties(properties).getString(
                        Errors.Keys.IllegalLanguageCode_1, '(' + key.substring(0, s) + ')' + key.substring(s), e));
            }
        }
        final Object value = entry.getValue();
        if (value != null) {
            if (!(value instanceof CharSequence)) {
                throw new IllegalArgumentException(Errors.forProperties(properties)
                        .getString(Errors.Keys.IllegalPropertyValueClass_2, key, value.getClass()));
            }
            if (i18n == null) {
                // First match: remember it without allocating a multi-locale container yet.
                i18n = (CharSequence) value;
                firstLocale = locale;
            } else {
                // Second (or later) match: promote to a multi-locale international string.
                if (dis == null) {
                    dis = new DefaultInternationalString();
                    add(dis, firstLocale, i18n);
                    i18n = dis;
                }
                add(dis, locale, (CharSequence) value);
            }
        }
    }
    // Wraps a plain CharSequence into a SimpleInternationalString if needed; null stays null.
    return toInternationalString(i18n);
}
/**
* Adds the given character sequence. If the given sequence is another {@link InternationalString} instance,
* then only the string for the given locale is added.
*
* @param locale the locale for the {@code string} value.
* @param string the character sequence to add.
* @throws IllegalArgumentException if a different string value was already set for the given locale.
*/
private static void add(final DefaultInternationalString dis, final Locale locale, final CharSequence string) {
    if (string instanceof InternationalString) {
        // Keep only the localization for the requested locale.
        dis.add(locale, ((InternationalString) string).toString(locale));
        if (!(string instanceof SimpleInternationalString)) {
            /*
             * The string may have more than one locale: log a warning telling that some locales
             * may have been ignored. We declare Types.toInternationalString(…) as the source
             * since it is the public facade invoking this method.
             */
            final LogRecord record = Messages.forLocale(null).createLogRecord(Level.WARNING, Messages.Keys.LocalesDiscarded);
            Logging.completeAndLog(LOGGER, Types.class, "toInternationalString", record);
        }
    } else {
        dis.add(locale, string.toString());
    }
}
/**
* Returns the given characters sequence as an international string. If the given sequence is
* null or an instance of {@link InternationalString}, then this method returns it unchanged.
* Otherwise, this method copies the {@link InternationalString#toString()} value in a new
* {@link SimpleInternationalString} instance and returns it.
*
* @param string the characters sequence to convert, or {@code null}.
* @return the given sequence as an international string, or {@code null} if the given sequence was null.
*
* @see DefaultNameFactory#createInternationalString(Map)
*/
public static InternationalString toInternationalString(final CharSequence string) {
    if (string == null) {
        return null;
    }
    if (string instanceof InternationalString) {
        return (InternationalString) string;    // Already of the requested type: no copy.
    }
    return new SimpleInternationalString(string.toString());
}
/**
* Returns the given array of {@code CharSequence}s as an array of {@code InternationalString}s.
* If the given array is null or an instance of {@code InternationalString[]}, then this method
* returns it unchanged. Otherwise a new array of type {@code InternationalString[]} is created
* and every elements from the given array is copied or
* {@linkplain #toInternationalString(CharSequence) cast} in the new array.
*
* <p>If a defensive copy of the {@code strings} array is wanted, then the caller needs to check
* if the returned array is the same instance as the one given in argument to this method.</p>
*
* @param strings the characters sequences to convert, or {@code null}.
* @return the given array as an array of type {@code InternationalString[]},
* or {@code null} if the given array was null.
*/
public static InternationalString[] toInternationalStrings(final CharSequence... strings) {
    if (strings == null || strings instanceof InternationalString[]) {
        // Already of the requested type (or null): return the given array with no defensive copy.
        return (InternationalString[]) strings;
    }
    final int length = strings.length;
    final InternationalString[] converted = new InternationalString[length];
    for (int i=0; i<length; i++) {
        converted[i] = toInternationalString(strings[i]);
    }
    return converted;
}
/**
* Returns the given international string in the given locale, or {@code null} if the given string is null.
* If the given locale is {@code null}, then the {@code i18n} default locale is used.
*
* @param i18n the international string to get as a localized string, or {@code null} if none.
* @param locale the desired locale, or {@code null} for the {@code i18n} default locale.
* @return the localized string, or {@code null} if {@code i18n} is {@code null}.
*
* @since 0.8
*/
public static String toString(final InternationalString i18n, final Locale locale) {
    if (i18n == null) {
        return null;
    }
    // A null locale means the i18n default locale.
    return (locale != null) ? i18n.toString(locale) : i18n.toString();
}
}
|
googleapis/google-cloud-java | 37,696 | java-chronicle/proto-google-cloud-chronicle-v1/src/main/java/com/google/cloud/chronicle/v1/ListDataAccessLabelsRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/chronicle/v1/data_access_control.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.chronicle.v1;
/**
 *
 *
 * <pre>
 * Request message for ListDataAccessLabels.
 * </pre>
 *
 * <p>Carries the resource {@code parent} to list under, the standard pagination fields
 * ({@code page_size}, {@code page_token}) and an optional AIP-160 {@code filter}.
 *
 * Protobuf type {@code google.cloud.chronicle.v1.ListDataAccessLabelsRequest}
 */
// NOTE: generated by protoc from data_access_control.proto — regenerate rather than editing by hand.
public final class ListDataAccessLabelsRequest extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.chronicle.v1.ListDataAccessLabelsRequest)
    ListDataAccessLabelsRequestOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use ListDataAccessLabelsRequest.newBuilder() to construct.
  private ListDataAccessLabelsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private ListDataAccessLabelsRequest() {
    parent_ = "";
    pageToken_ = "";
    filter_ = "";
  }
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ListDataAccessLabelsRequest();
  }
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.chronicle.v1.DataAccessProto
        .internal_static_google_cloud_chronicle_v1_ListDataAccessLabelsRequest_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.chronicle.v1.DataAccessProto
        .internal_static_google_cloud_chronicle_v1_ListDataAccessLabelsRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.chronicle.v1.ListDataAccessLabelsRequest.class,
            com.google.cloud.chronicle.v1.ListDataAccessLabelsRequest.Builder.class);
  }
  public static final int PARENT_FIELD_NUMBER = 1;
  // Holds either a java.lang.String or a ByteString; decoded to String lazily on first read
  // and the decoded value is cached back into the field (same pattern for pageToken_/filter_).
  @SuppressWarnings("serial")
  private volatile java.lang.Object parent_ = "";
  /**
   *
   *
   * <pre>
   * Required. The parent resource where this data access label will be created.
   * Format: `projects/{project}/locations/{location}/instances/{instance}`
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The parent.
   */
  @java.lang.Override
  public java.lang.String getParent() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      parent_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Required. The parent resource where this data access label will be created.
   * Format: `projects/{project}/locations/{location}/instances/{instance}`
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The bytes for parent.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getParentBytes() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      parent_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int PAGE_SIZE_FIELD_NUMBER = 2;
  private int pageSize_ = 0;
  /**
   *
   *
   * <pre>
   * The maximum number of data access labels to return. The service may return
   * fewer than this value. If unspecified, at most 100 data access labels will
   * be returned. The maximum value is 1000; values above 1000 will be coerced
   * to 1000.
   * </pre>
   *
   * <code>int32 page_size = 2;</code>
   *
   * @return The pageSize.
   */
  @java.lang.Override
  public int getPageSize() {
    return pageSize_;
  }
  public static final int PAGE_TOKEN_FIELD_NUMBER = 3;
  @SuppressWarnings("serial")
  private volatile java.lang.Object pageToken_ = "";
  /**
   *
   *
   * <pre>
   * A page token, received from a previous `ListDataAccessLabelsRequest` call.
   * Provide this to retrieve the subsequent page.
   * </pre>
   *
   * <code>string page_token = 3;</code>
   *
   * @return The pageToken.
   */
  @java.lang.Override
  public java.lang.String getPageToken() {
    java.lang.Object ref = pageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      pageToken_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * A page token, received from a previous `ListDataAccessLabelsRequest` call.
   * Provide this to retrieve the subsequent page.
   * </pre>
   *
   * <code>string page_token = 3;</code>
   *
   * @return The bytes for pageToken.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getPageTokenBytes() {
    java.lang.Object ref = pageToken_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      pageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int FILTER_FIELD_NUMBER = 4;
  @SuppressWarnings("serial")
  private volatile java.lang.Object filter_ = "";
  /**
   *
   *
   * <pre>
   * Optional. A filter which should follow the guidelines of AIP-160.
   * Supports filtering on all fields of DataAccessLabel and all operations as
   * mentioned in https://google.aip.dev/160.
   * example filter: "create_time greater than \"2023-04-21T11:30:00-04:00\" OR
   * display_name:\"-21-1\"".
   * </pre>
   *
   * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The filter.
   */
  @java.lang.Override
  public java.lang.String getFilter() {
    java.lang.Object ref = filter_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      filter_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Optional. A filter which should follow the guidelines of AIP-160.
   * Supports filtering on all fields of DataAccessLabel and all operations as
   * mentioned in https://google.aip.dev/160.
   * example filter: "create_time greater than \"2023-04-21T11:30:00-04:00\" OR
   * display_name:\"-21-1\"".
   * </pre>
   *
   * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The bytes for filter.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getFilterBytes() {
    java.lang.Object ref = filter_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      filter_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // Memoization sentinel: -1 = not computed, 1 = initialized. This message has no
  // required fields, so isInitialized() always resolves to true.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Proto3 semantics: fields at their default value (empty string / 0) are not serialized.
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
    }
    if (pageSize_ != 0) {
      output.writeInt32(2, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 4, filter_);
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // memoizedSize caches the computed size; -1 means "not computed yet".
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
    }
    if (pageSize_ != 0) {
      size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, filter_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.chronicle.v1.ListDataAccessLabelsRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.chronicle.v1.ListDataAccessLabelsRequest other =
        (com.google.cloud.chronicle.v1.ListDataAccessLabelsRequest) obj;
    if (!getParent().equals(other.getParent())) return false;
    if (getPageSize() != other.getPageSize()) return false;
    if (!getPageToken().equals(other.getPageToken())) return false;
    if (!getFilter().equals(other.getFilter())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    // memoizedHashCode == 0 means "not computed yet"; the mixing below cannot yield 0
    // for this field layout, so 0 is a safe sentinel.
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
    hash = (53 * hash) + getPageSize();
    hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getPageToken().hashCode();
    hash = (37 * hash) + FILTER_FIELD_NUMBER;
    hash = (53 * hash) + getFilter().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  public static com.google.cloud.chronicle.v1.ListDataAccessLabelsRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.chronicle.v1.ListDataAccessLabelsRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.chronicle.v1.ListDataAccessLabelsRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.chronicle.v1.ListDataAccessLabelsRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.chronicle.v1.ListDataAccessLabelsRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.chronicle.v1.ListDataAccessLabelsRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.chronicle.v1.ListDataAccessLabelsRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.chronicle.v1.ListDataAccessLabelsRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.chronicle.v1.ListDataAccessLabelsRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.chronicle.v1.ListDataAccessLabelsRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.chronicle.v1.ListDataAccessLabelsRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.chronicle.v1.ListDataAccessLabelsRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(
      com.google.cloud.chronicle.v1.ListDataAccessLabelsRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * Request message for ListDataAccessLabels.
   * </pre>
   *
   * Protobuf type {@code google.cloud.chronicle.v1.ListDataAccessLabelsRequest}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.chronicle.v1.ListDataAccessLabelsRequest)
      com.google.cloud.chronicle.v1.ListDataAccessLabelsRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.chronicle.v1.DataAccessProto
          .internal_static_google_cloud_chronicle_v1_ListDataAccessLabelsRequest_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.chronicle.v1.DataAccessProto
          .internal_static_google_cloud_chronicle_v1_ListDataAccessLabelsRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.chronicle.v1.ListDataAccessLabelsRequest.class,
              com.google.cloud.chronicle.v1.ListDataAccessLabelsRequest.Builder.class);
    }
    // Construct using com.google.cloud.chronicle.v1.ListDataAccessLabelsRequest.newBuilder()
    private Builder() {}
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      parent_ = "";
      pageSize_ = 0;
      pageToken_ = "";
      filter_ = "";
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.chronicle.v1.DataAccessProto
          .internal_static_google_cloud_chronicle_v1_ListDataAccessLabelsRequest_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.chronicle.v1.ListDataAccessLabelsRequest getDefaultInstanceForType() {
      return com.google.cloud.chronicle.v1.ListDataAccessLabelsRequest.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.cloud.chronicle.v1.ListDataAccessLabelsRequest build() {
      com.google.cloud.chronicle.v1.ListDataAccessLabelsRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.chronicle.v1.ListDataAccessLabelsRequest buildPartial() {
      com.google.cloud.chronicle.v1.ListDataAccessLabelsRequest result =
          new com.google.cloud.chronicle.v1.ListDataAccessLabelsRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Copies only the fields whose "has been set" bit is on into the result message.
    private void buildPartial0(com.google.cloud.chronicle.v1.ListDataAccessLabelsRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.parent_ = parent_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.pageSize_ = pageSize_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.pageToken_ = pageToken_;
      }
      if (((from_bitField0_ & 0x00000008) != 0)) {
        result.filter_ = filter_;
      }
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.chronicle.v1.ListDataAccessLabelsRequest) {
        return mergeFrom((com.google.cloud.chronicle.v1.ListDataAccessLabelsRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    public Builder mergeFrom(com.google.cloud.chronicle.v1.ListDataAccessLabelsRequest other) {
      if (other == com.google.cloud.chronicle.v1.ListDataAccessLabelsRequest.getDefaultInstance())
        return this;
      // Proto3 merge semantics: only non-default values from `other` overwrite this builder.
      if (!other.getParent().isEmpty()) {
        parent_ = other.parent_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (other.getPageSize() != 0) {
        setPageSize(other.getPageSize());
      }
      if (!other.getPageToken().isEmpty()) {
        pageToken_ = other.pageToken_;
        bitField0_ |= 0x00000004;
        onChanged();
      }
      if (!other.getFilter().isEmpty()) {
        filter_ = other.filter_;
        bitField0_ |= 0x00000008;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          // Tags are (field_number << 3) | wire_type: 10/26/34 are length-delimited
          // strings for fields 1/3/4, 16 is varint for field 2.
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                parent_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 16:
              {
                pageSize_ = input.readInt32();
                bitField0_ |= 0x00000002;
                break;
              } // case 16
            case 26:
              {
                pageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            case 34:
              {
                filter_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000008;
                break;
              } // case 34
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    private int bitField0_;
    private java.lang.Object parent_ = "";
    /**
     *
     *
     * <pre>
     * Required. The parent resource where this data access label will be created.
     * Format: `projects/{project}/locations/{location}/instances/{instance}`
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The parent.
     */
    public java.lang.String getParent() {
      java.lang.Object ref = parent_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        parent_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The parent resource where this data access label will be created.
     * Format: `projects/{project}/locations/{location}/instances/{instance}`
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The bytes for parent.
     */
    public com.google.protobuf.ByteString getParentBytes() {
      java.lang.Object ref = parent_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        parent_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The parent resource where this data access label will be created.
     * Format: `projects/{project}/locations/{location}/instances/{instance}`
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The parent to set.
     * @return This builder for chaining.
     */
    public Builder setParent(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The parent resource where this data access label will be created.
     * Format: `projects/{project}/locations/{location}/instances/{instance}`
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearParent() {
      parent_ = getDefaultInstance().getParent();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The parent resource where this data access label will be created.
     * Format: `projects/{project}/locations/{location}/instances/{instance}`
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The bytes for parent to set.
     * @return This builder for chaining.
     */
    public Builder setParentBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    private int pageSize_;
    /**
     *
     *
     * <pre>
     * The maximum number of data access labels to return. The service may return
     * fewer than this value. If unspecified, at most 100 data access labels will
     * be returned. The maximum value is 1000; values above 1000 will be coerced
     * to 1000.
     * </pre>
     *
     * <code>int32 page_size = 2;</code>
     *
     * @return The pageSize.
     */
    @java.lang.Override
    public int getPageSize() {
      return pageSize_;
    }
    /**
     *
     *
     * <pre>
     * The maximum number of data access labels to return. The service may return
     * fewer than this value. If unspecified, at most 100 data access labels will
     * be returned. The maximum value is 1000; values above 1000 will be coerced
     * to 1000.
     * </pre>
     *
     * <code>int32 page_size = 2;</code>
     *
     * @param value The pageSize to set.
     * @return This builder for chaining.
     */
    public Builder setPageSize(int value) {
      pageSize_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The maximum number of data access labels to return. The service may return
     * fewer than this value. If unspecified, at most 100 data access labels will
     * be returned. The maximum value is 1000; values above 1000 will be coerced
     * to 1000.
     * </pre>
     *
     * <code>int32 page_size = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearPageSize() {
      bitField0_ = (bitField0_ & ~0x00000002);
      pageSize_ = 0;
      onChanged();
      return this;
    }
    private java.lang.Object pageToken_ = "";
    /**
     *
     *
     * <pre>
     * A page token, received from a previous `ListDataAccessLabelsRequest` call.
     * Provide this to retrieve the subsequent page.
     * </pre>
     *
     * <code>string page_token = 3;</code>
     *
     * @return The pageToken.
     */
    public java.lang.String getPageToken() {
      java.lang.Object ref = pageToken_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        pageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * A page token, received from a previous `ListDataAccessLabelsRequest` call.
     * Provide this to retrieve the subsequent page.
     * </pre>
     *
     * <code>string page_token = 3;</code>
     *
     * @return The bytes for pageToken.
     */
    public com.google.protobuf.ByteString getPageTokenBytes() {
      java.lang.Object ref = pageToken_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        pageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * A page token, received from a previous `ListDataAccessLabelsRequest` call.
     * Provide this to retrieve the subsequent page.
     * </pre>
     *
     * <code>string page_token = 3;</code>
     *
     * @param value The pageToken to set.
     * @return This builder for chaining.
     */
    public Builder setPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      pageToken_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * A page token, received from a previous `ListDataAccessLabelsRequest` call.
     * Provide this to retrieve the subsequent page.
     * </pre>
     *
     * <code>string page_token = 3;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearPageToken() {
      pageToken_ = getDefaultInstance().getPageToken();
      bitField0_ = (bitField0_ & ~0x00000004);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * A page token, received from a previous `ListDataAccessLabelsRequest` call.
     * Provide this to retrieve the subsequent page.
     * </pre>
     *
     * <code>string page_token = 3;</code>
     *
     * @param value The bytes for pageToken to set.
     * @return This builder for chaining.
     */
    public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      pageToken_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    private java.lang.Object filter_ = "";
    /**
     *
     *
     * <pre>
     * Optional. A filter which should follow the guidelines of AIP-160.
     * Supports filtering on all fields of DataAccessLabel and all operations as
     * mentioned in https://google.aip.dev/160.
     * example filter: "create_time greater than \"2023-04-21T11:30:00-04:00\" OR
     * display_name:\"-21-1\"".
     * </pre>
     *
     * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The filter.
     */
    public java.lang.String getFilter() {
      java.lang.Object ref = filter_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        filter_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. A filter which should follow the guidelines of AIP-160.
     * Supports filtering on all fields of DataAccessLabel and all operations as
     * mentioned in https://google.aip.dev/160.
     * example filter: "create_time greater than \"2023-04-21T11:30:00-04:00\" OR
     * display_name:\"-21-1\"".
     * </pre>
     *
     * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The bytes for filter.
     */
    public com.google.protobuf.ByteString getFilterBytes() {
      java.lang.Object ref = filter_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        filter_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. A filter which should follow the guidelines of AIP-160.
     * Supports filtering on all fields of DataAccessLabel and all operations as
     * mentioned in https://google.aip.dev/160.
     * example filter: "create_time greater than \"2023-04-21T11:30:00-04:00\" OR
     * display_name:\"-21-1\"".
     * </pre>
     *
     * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The filter to set.
     * @return This builder for chaining.
     */
    public Builder setFilter(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      filter_ = value;
      bitField0_ |= 0x00000008;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. A filter which should follow the guidelines of AIP-160.
     * Supports filtering on all fields of DataAccessLabel and all operations as
     * mentioned in https://google.aip.dev/160.
     * example filter: "create_time greater than \"2023-04-21T11:30:00-04:00\" OR
     * display_name:\"-21-1\"".
     * </pre>
     *
     * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearFilter() {
      filter_ = getDefaultInstance().getFilter();
      bitField0_ = (bitField0_ & ~0x00000008);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. A filter which should follow the guidelines of AIP-160.
     * Supports filtering on all fields of DataAccessLabel and all operations as
     * mentioned in https://google.aip.dev/160.
     * example filter: "create_time greater than \"2023-04-21T11:30:00-04:00\" OR
     * display_name:\"-21-1\"".
     * </pre>
     *
     * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The bytes for filter to set.
     * @return This builder for chaining.
     */
    public Builder setFilterBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      filter_ = value;
      bitField0_ |= 0x00000008;
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.cloud.chronicle.v1.ListDataAccessLabelsRequest)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.chronicle.v1.ListDataAccessLabelsRequest)
  private static final com.google.cloud.chronicle.v1.ListDataAccessLabelsRequest DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.chronicle.v1.ListDataAccessLabelsRequest();
  }
  public static com.google.cloud.chronicle.v1.ListDataAccessLabelsRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  private static final com.google.protobuf.Parser<ListDataAccessLabelsRequest> PARSER =
      new com.google.protobuf.AbstractParser<ListDataAccessLabelsRequest>() {
        @java.lang.Override
        public ListDataAccessLabelsRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<ListDataAccessLabelsRequest> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<ListDataAccessLabelsRequest> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.chronicle.v1.ListDataAccessLabelsRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 37,696 | java-chronicle/proto-google-cloud-chronicle-v1/src/main/java/com/google/cloud/chronicle/v1/ListDataAccessScopesRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/chronicle/v1/data_access_control.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.chronicle.v1;
/**
*
*
* <pre>
* Request message for ListDataAccessScopes.
* </pre>
*
* Protobuf type {@code google.cloud.chronicle.v1.ListDataAccessScopesRequest}
*/
public final class ListDataAccessScopesRequest extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.chronicle.v1.ListDataAccessScopesRequest)
    ListDataAccessScopesRequestOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use ListDataAccessScopesRequest.newBuilder() to construct.
  private ListDataAccessScopesRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private ListDataAccessScopesRequest() {
    parent_ = "";
    pageToken_ = "";
    filter_ = "";
  }
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ListDataAccessScopesRequest();
  }
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.chronicle.v1.DataAccessProto
        .internal_static_google_cloud_chronicle_v1_ListDataAccessScopesRequest_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.chronicle.v1.DataAccessProto
        .internal_static_google_cloud_chronicle_v1_ListDataAccessScopesRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.chronicle.v1.ListDataAccessScopesRequest.class,
            com.google.cloud.chronicle.v1.ListDataAccessScopesRequest.Builder.class);
  }
  public static final int PARENT_FIELD_NUMBER = 1;
  @SuppressWarnings("serial")
  private volatile java.lang.Object parent_ = "";
  /**
   *
   *
   * <pre>
   * Required. The parent resource where this data access scope will be created.
   * Format: `projects/{project}/locations/{location}/instances/{instance}`
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The parent.
   */
  @java.lang.Override
  public java.lang.String getParent() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      parent_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Required. The parent resource where this data access scope will be created.
   * Format: `projects/{project}/locations/{location}/instances/{instance}`
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The bytes for parent.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getParentBytes() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      parent_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int PAGE_SIZE_FIELD_NUMBER = 2;
  private int pageSize_ = 0;
  /**
   *
   *
   * <pre>
   * The maximum number of data access scopes to return. The service may return
   * fewer than this value. If unspecified, at most 100 data access scopes will
   * be returned. The maximum value is 1000; values above 1000 will be coerced
   * to 1000.
   * </pre>
   *
   * <code>int32 page_size = 2;</code>
   *
   * @return The pageSize.
   */
  @java.lang.Override
  public int getPageSize() {
    return pageSize_;
  }
  public static final int PAGE_TOKEN_FIELD_NUMBER = 3;
  @SuppressWarnings("serial")
  private volatile java.lang.Object pageToken_ = "";
  /**
   *
   *
   * <pre>
   * A page token, received from a previous `ListDataAccessScopesRequest` call.
   * Provide this to retrieve the subsequent page.
   * </pre>
   *
   * <code>string page_token = 3;</code>
   *
   * @return The pageToken.
   */
  @java.lang.Override
  public java.lang.String getPageToken() {
    java.lang.Object ref = pageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      pageToken_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * A page token, received from a previous `ListDataAccessScopesRequest` call.
   * Provide this to retrieve the subsequent page.
   * </pre>
   *
   * <code>string page_token = 3;</code>
   *
   * @return The bytes for pageToken.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getPageTokenBytes() {
    java.lang.Object ref = pageToken_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      pageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int FILTER_FIELD_NUMBER = 4;
  @SuppressWarnings("serial")
  private volatile java.lang.Object filter_ = "";
  /**
   *
   *
   * <pre>
   * Optional. A filter which should follow the guidelines of AIP-160.
   * Supports filtering on all fields of DataAccessScope and all operations as
   * mentioned in https://google.aip.dev/160.
   * Example filter: "create_time greater than \"2023-04-21T11:30:00-04:00\" OR
   * display_name:\"-21-1\"".
   * </pre>
   *
   * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The filter.
   */
  @java.lang.Override
  public java.lang.String getFilter() {
    java.lang.Object ref = filter_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      filter_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Optional. A filter which should follow the guidelines of AIP-160.
   * Supports filtering on all fields of DataAccessScope and all operations as
   * mentioned in https://google.aip.dev/160.
   * Example filter: "create_time greater than \"2023-04-21T11:30:00-04:00\" OR
   * display_name:\"-21-1\"".
   * </pre>
   *
   * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The bytes for filter.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getFilterBytes() {
    java.lang.Object ref = filter_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      filter_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
    }
    if (pageSize_ != 0) {
      output.writeInt32(2, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 4, filter_);
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
    }
    if (pageSize_ != 0) {
      size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, filter_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.chronicle.v1.ListDataAccessScopesRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.chronicle.v1.ListDataAccessScopesRequest other =
        (com.google.cloud.chronicle.v1.ListDataAccessScopesRequest) obj;
    if (!getParent().equals(other.getParent())) return false;
    if (getPageSize() != other.getPageSize()) return false;
    if (!getPageToken().equals(other.getPageToken())) return false;
    if (!getFilter().equals(other.getFilter())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
    hash = (53 * hash) + getPageSize();
    hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getPageToken().hashCode();
    hash = (37 * hash) + FILTER_FIELD_NUMBER;
    hash = (53 * hash) + getFilter().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  public static com.google.cloud.chronicle.v1.ListDataAccessScopesRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.chronicle.v1.ListDataAccessScopesRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.chronicle.v1.ListDataAccessScopesRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.chronicle.v1.ListDataAccessScopesRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.chronicle.v1.ListDataAccessScopesRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.chronicle.v1.ListDataAccessScopesRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.chronicle.v1.ListDataAccessScopesRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.chronicle.v1.ListDataAccessScopesRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.chronicle.v1.ListDataAccessScopesRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.chronicle.v1.ListDataAccessScopesRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.chronicle.v1.ListDataAccessScopesRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.chronicle.v1.ListDataAccessScopesRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(
      com.google.cloud.chronicle.v1.ListDataAccessScopesRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * Request message for ListDataAccessScopes.
   * </pre>
   *
   * Protobuf type {@code google.cloud.chronicle.v1.ListDataAccessScopesRequest}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.chronicle.v1.ListDataAccessScopesRequest)
      com.google.cloud.chronicle.v1.ListDataAccessScopesRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.chronicle.v1.DataAccessProto
          .internal_static_google_cloud_chronicle_v1_ListDataAccessScopesRequest_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.chronicle.v1.DataAccessProto
          .internal_static_google_cloud_chronicle_v1_ListDataAccessScopesRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.chronicle.v1.ListDataAccessScopesRequest.class,
              com.google.cloud.chronicle.v1.ListDataAccessScopesRequest.Builder.class);
    }
    // Construct using com.google.cloud.chronicle.v1.ListDataAccessScopesRequest.newBuilder()
    private Builder() {}
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      parent_ = "";
      pageSize_ = 0;
      pageToken_ = "";
      filter_ = "";
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.chronicle.v1.DataAccessProto
          .internal_static_google_cloud_chronicle_v1_ListDataAccessScopesRequest_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.chronicle.v1.ListDataAccessScopesRequest getDefaultInstanceForType() {
      return com.google.cloud.chronicle.v1.ListDataAccessScopesRequest.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.cloud.chronicle.v1.ListDataAccessScopesRequest build() {
      com.google.cloud.chronicle.v1.ListDataAccessScopesRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.chronicle.v1.ListDataAccessScopesRequest buildPartial() {
      com.google.cloud.chronicle.v1.ListDataAccessScopesRequest result =
          new com.google.cloud.chronicle.v1.ListDataAccessScopesRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    private void buildPartial0(com.google.cloud.chronicle.v1.ListDataAccessScopesRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.parent_ = parent_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.pageSize_ = pageSize_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.pageToken_ = pageToken_;
      }
      if (((from_bitField0_ & 0x00000008) != 0)) {
        result.filter_ = filter_;
      }
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.chronicle.v1.ListDataAccessScopesRequest) {
        return mergeFrom((com.google.cloud.chronicle.v1.ListDataAccessScopesRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    public Builder mergeFrom(com.google.cloud.chronicle.v1.ListDataAccessScopesRequest other) {
      if (other == com.google.cloud.chronicle.v1.ListDataAccessScopesRequest.getDefaultInstance())
        return this;
      if (!other.getParent().isEmpty()) {
        parent_ = other.parent_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (other.getPageSize() != 0) {
        setPageSize(other.getPageSize());
      }
      if (!other.getPageToken().isEmpty()) {
        pageToken_ = other.pageToken_;
        bitField0_ |= 0x00000004;
        onChanged();
      }
      if (!other.getFilter().isEmpty()) {
        filter_ = other.filter_;
        bitField0_ |= 0x00000008;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                parent_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 16:
              {
                pageSize_ = input.readInt32();
                bitField0_ |= 0x00000002;
                break;
              } // case 16
            case 26:
              {
                pageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            case 34:
              {
                filter_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000008;
                break;
              } // case 34
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    private int bitField0_;
    private java.lang.Object parent_ = "";
    /**
     *
     *
     * <pre>
     * Required. The parent resource where this data access scope will be created.
     * Format: `projects/{project}/locations/{location}/instances/{instance}`
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The parent.
     */
    public java.lang.String getParent() {
      java.lang.Object ref = parent_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        parent_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The parent resource where this data access scope will be created.
     * Format: `projects/{project}/locations/{location}/instances/{instance}`
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The bytes for parent.
     */
    public com.google.protobuf.ByteString getParentBytes() {
      java.lang.Object ref = parent_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        parent_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The parent resource where this data access scope will be created.
     * Format: `projects/{project}/locations/{location}/instances/{instance}`
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The parent to set.
     * @return This builder for chaining.
     */
    public Builder setParent(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The parent resource where this data access scope will be created.
     * Format: `projects/{project}/locations/{location}/instances/{instance}`
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearParent() {
      parent_ = getDefaultInstance().getParent();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The parent resource where this data access scope will be created.
     * Format: `projects/{project}/locations/{location}/instances/{instance}`
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The bytes for parent to set.
     * @return This builder for chaining.
     */
    public Builder setParentBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    private int pageSize_;
    /**
     *
     *
     * <pre>
     * The maximum number of data access scopes to return. The service may return
     * fewer than this value. If unspecified, at most 100 data access scopes will
     * be returned. The maximum value is 1000; values above 1000 will be coerced
     * to 1000.
     * </pre>
     *
     * <code>int32 page_size = 2;</code>
     *
     * @return The pageSize.
     */
    @java.lang.Override
    public int getPageSize() {
      return pageSize_;
    }
    /**
     *
     *
     * <pre>
     * The maximum number of data access scopes to return. The service may return
     * fewer than this value. If unspecified, at most 100 data access scopes will
     * be returned. The maximum value is 1000; values above 1000 will be coerced
     * to 1000.
     * </pre>
     *
     * <code>int32 page_size = 2;</code>
     *
     * @param value The pageSize to set.
     * @return This builder for chaining.
     */
    public Builder setPageSize(int value) {
      pageSize_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The maximum number of data access scopes to return. The service may return
     * fewer than this value. If unspecified, at most 100 data access scopes will
     * be returned. The maximum value is 1000; values above 1000 will be coerced
     * to 1000.
     * </pre>
     *
     * <code>int32 page_size = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearPageSize() {
      bitField0_ = (bitField0_ & ~0x00000002);
      pageSize_ = 0;
      onChanged();
      return this;
    }
    private java.lang.Object pageToken_ = "";
    /**
     *
     *
     * <pre>
     * A page token, received from a previous `ListDataAccessScopesRequest` call.
     * Provide this to retrieve the subsequent page.
     * </pre>
     *
     * <code>string page_token = 3;</code>
     *
     * @return The pageToken.
     */
    public java.lang.String getPageToken() {
      java.lang.Object ref = pageToken_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        pageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * A page token, received from a previous `ListDataAccessScopesRequest` call.
     * Provide this to retrieve the subsequent page.
     * </pre>
     *
     * <code>string page_token = 3;</code>
     *
     * @return The bytes for pageToken.
     */
    public com.google.protobuf.ByteString getPageTokenBytes() {
      java.lang.Object ref = pageToken_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        pageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * A page token, received from a previous `ListDataAccessScopesRequest` call.
     * Provide this to retrieve the subsequent page.
     * </pre>
     *
     * <code>string page_token = 3;</code>
     *
     * @param value The pageToken to set.
     * @return This builder for chaining.
     */
    public Builder setPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      pageToken_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * A page token, received from a previous `ListDataAccessScopesRequest` call.
     * Provide this to retrieve the subsequent page.
     * </pre>
     *
     * <code>string page_token = 3;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearPageToken() {
      pageToken_ = getDefaultInstance().getPageToken();
      bitField0_ = (bitField0_ & ~0x00000004);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * A page token, received from a previous `ListDataAccessScopesRequest` call.
     * Provide this to retrieve the subsequent page.
     * </pre>
     *
     * <code>string page_token = 3;</code>
     *
     * @param value The bytes for pageToken to set.
     * @return This builder for chaining.
     */
    public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      pageToken_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    private java.lang.Object filter_ = "";
    /**
     *
     *
     * <pre>
     * Optional. A filter which should follow the guidelines of AIP-160.
     * Supports filtering on all fields of DataAccessScope and all operations as
     * mentioned in https://google.aip.dev/160.
     * Example filter: "create_time greater than \"2023-04-21T11:30:00-04:00\" OR
     * display_name:\"-21-1\"".
     * </pre>
     *
     * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The filter.
     */
    public java.lang.String getFilter() {
      java.lang.Object ref = filter_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        filter_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. A filter which should follow the guidelines of AIP-160.
     * Supports filtering on all fields of DataAccessScope and all operations as
     * mentioned in https://google.aip.dev/160.
     * Example filter: "create_time greater than \"2023-04-21T11:30:00-04:00\" OR
     * display_name:\"-21-1\"".
     * </pre>
     *
     * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The bytes for filter.
     */
    public com.google.protobuf.ByteString getFilterBytes() {
      java.lang.Object ref = filter_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        filter_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. A filter which should follow the guidelines of AIP-160.
     * Supports filtering on all fields of DataAccessScope and all operations as
     * mentioned in https://google.aip.dev/160.
     * Example filter: "create_time greater than \"2023-04-21T11:30:00-04:00\" OR
     * display_name:\"-21-1\"".
     * </pre>
     *
     * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The filter to set.
     * @return This builder for chaining.
     */
    public Builder setFilter(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      filter_ = value;
      bitField0_ |= 0x00000008;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. A filter which should follow the guidelines of AIP-160.
     * Supports filtering on all fields of DataAccessScope and all operations as
     * mentioned in https://google.aip.dev/160.
     * Example filter: "create_time greater than \"2023-04-21T11:30:00-04:00\" OR
     * display_name:\"-21-1\"".
     * </pre>
     *
     * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearFilter() {
      filter_ = getDefaultInstance().getFilter();
      bitField0_ = (bitField0_ & ~0x00000008);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. A filter which should follow the guidelines of AIP-160.
     * Supports filtering on all fields of DataAccessScope and all operations as
     * mentioned in https://google.aip.dev/160.
     * Example filter: "create_time greater than \"2023-04-21T11:30:00-04:00\" OR
     * display_name:\"-21-1\"".
     * </pre>
     *
     * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The bytes for filter to set.
     * @return This builder for chaining.
     */
    public Builder setFilterBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      filter_ = value;
      bitField0_ |= 0x00000008;
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.cloud.chronicle.v1.ListDataAccessScopesRequest)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.chronicle.v1.ListDataAccessScopesRequest)
  private static final com.google.cloud.chronicle.v1.ListDataAccessScopesRequest DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.chronicle.v1.ListDataAccessScopesRequest();
  }
  public static com.google.cloud.chronicle.v1.ListDataAccessScopesRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  private static final com.google.protobuf.Parser<ListDataAccessScopesRequest> PARSER =
      new com.google.protobuf.AbstractParser<ListDataAccessScopesRequest>() {
        @java.lang.Override
        public ListDataAccessScopesRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<ListDataAccessScopesRequest> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<ListDataAccessScopesRequest> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.chronicle.v1.ListDataAccessScopesRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 37,732 | java-datalineage/proto-google-cloud-datalineage-v1/src/main/java/com/google/cloud/datacatalog/lineage/v1/CreateLineageEventRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/datacatalog/lineage/v1/lineage.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.datacatalog.lineage.v1;
/**
*
*
* <pre>
* Request message for
* [CreateLineageEvent][google.cloud.datacatalog.lineage.v1.CreateLineageEvent].
* </pre>
*
* Protobuf type {@code google.cloud.datacatalog.lineage.v1.CreateLineageEventRequest}
*/
public final class CreateLineageEventRequest extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.datacatalog.lineage.v1.CreateLineageEventRequest)
    CreateLineageEventRequestOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use CreateLineageEventRequest.newBuilder() to construct.
  private CreateLineageEventRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // Default constructor: initializes string fields to "" (proto3 default).
  private CreateLineageEventRequest() {
    parent_ = "";
    requestId_ = "";
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new CreateLineageEventRequest();
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.datacatalog.lineage.v1.LineageProto
        .internal_static_google_cloud_datacatalog_lineage_v1_CreateLineageEventRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.datacatalog.lineage.v1.LineageProto
        .internal_static_google_cloud_datacatalog_lineage_v1_CreateLineageEventRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.datacatalog.lineage.v1.CreateLineageEventRequest.class,
            com.google.cloud.datacatalog.lineage.v1.CreateLineageEventRequest.Builder.class);
  }

  // Presence bits for message-typed fields; bit 0 tracks lineage_event.
  private int bitField0_;
  public static final int PARENT_FIELD_NUMBER = 1;

  @SuppressWarnings("serial")
  private volatile java.lang.Object parent_ = "";

  /**
   *
   *
   * <pre>
   * Required. The name of the run that should own the lineage event.
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The parent.
   */
  @java.lang.Override
  public java.lang.String getParent() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Lazily decode the ByteString form and cache the String for later calls.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      parent_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * Required. The name of the run that should own the lineage event.
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The bytes for parent.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getParentBytes() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      // Lazily encode the String form and cache the ByteString for later calls.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      parent_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int LINEAGE_EVENT_FIELD_NUMBER = 2;
  private com.google.cloud.datacatalog.lineage.v1.LineageEvent lineageEvent_;

  /**
   *
   *
   * <pre>
   * Required. The lineage event to create.
   * </pre>
   *
   * <code>
   * .google.cloud.datacatalog.lineage.v1.LineageEvent lineage_event = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return Whether the lineageEvent field is set.
   */
  @java.lang.Override
  public boolean hasLineageEvent() {
    return ((bitField0_ & 0x00000001) != 0);
  }

  /**
   *
   *
   * <pre>
   * Required. The lineage event to create.
   * </pre>
   *
   * <code>
   * .google.cloud.datacatalog.lineage.v1.LineageEvent lineage_event = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return The lineageEvent.
   */
  @java.lang.Override
  public com.google.cloud.datacatalog.lineage.v1.LineageEvent getLineageEvent() {
    return lineageEvent_ == null
        ? com.google.cloud.datacatalog.lineage.v1.LineageEvent.getDefaultInstance()
        : lineageEvent_;
  }

  /**
   *
   *
   * <pre>
   * Required. The lineage event to create.
   * </pre>
   *
   * <code>
   * .google.cloud.datacatalog.lineage.v1.LineageEvent lineage_event = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.datacatalog.lineage.v1.LineageEventOrBuilder getLineageEventOrBuilder() {
    return lineageEvent_ == null
        ? com.google.cloud.datacatalog.lineage.v1.LineageEvent.getDefaultInstance()
        : lineageEvent_;
  }

  public static final int REQUEST_ID_FIELD_NUMBER = 3;

  @SuppressWarnings("serial")
  private volatile java.lang.Object requestId_ = "";

  /**
   *
   *
   * <pre>
   * A unique identifier for this request. Restricted to 36 ASCII characters.
   * A random UUID is recommended. This request is idempotent only if a
   * `request_id` is provided.
   * </pre>
   *
   * <code>string request_id = 3;</code>
   *
   * @return The requestId.
   */
  @java.lang.Override
  public java.lang.String getRequestId() {
    java.lang.Object ref = requestId_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Lazily decode the ByteString form and cache the String for later calls.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      requestId_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * A unique identifier for this request. Restricted to 36 ASCII characters.
   * A random UUID is recommended. This request is idempotent only if a
   * `request_id` is provided.
   * </pre>
   *
   * <code>string request_id = 3;</code>
   *
   * @return The bytes for requestId.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getRequestIdBytes() {
    java.lang.Object ref = requestId_;
    if (ref instanceof java.lang.String) {
      // Lazily encode the String form and cache the ByteString for later calls.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      requestId_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  // Initialization-check cache: -1 = not yet computed, 0 = not initialized, 1 = initialized.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  // Serializes set fields in ascending field-number order, per proto wire format.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(2, getLineageEvent());
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(requestId_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, requestId_);
    }
    getUnknownFields().writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    // memoizedSize caches the computed size; -1 means not yet computed.
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getLineageEvent());
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(requestId_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, requestId_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  // Value equality over all fields, including unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.datacatalog.lineage.v1.CreateLineageEventRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.datacatalog.lineage.v1.CreateLineageEventRequest other =
        (com.google.cloud.datacatalog.lineage.v1.CreateLineageEventRequest) obj;

    if (!getParent().equals(other.getParent())) return false;
    if (hasLineageEvent() != other.hasLineageEvent()) return false;
    if (hasLineageEvent()) {
      if (!getLineageEvent().equals(other.getLineageEvent())) return false;
    }
    if (!getRequestId().equals(other.getRequestId())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  // Hash consistent with equals; memoized (0 is the "not computed" sentinel).
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    if (hasLineageEvent()) {
      hash = (37 * hash) + LINEAGE_EVENT_FIELD_NUMBER;
      hash = (53 * hash) + getLineageEvent().hashCode();
    }
    hash = (37 * hash) + REQUEST_ID_FIELD_NUMBER;
    hash = (53 * hash) + getRequestId().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  // Standard generated parseFrom overloads for each supported input source.
  public static com.google.cloud.datacatalog.lineage.v1.CreateLineageEventRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.datacatalog.lineage.v1.CreateLineageEventRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.datacatalog.lineage.v1.CreateLineageEventRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.datacatalog.lineage.v1.CreateLineageEventRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.datacatalog.lineage.v1.CreateLineageEventRequest parseFrom(
      byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.datacatalog.lineage.v1.CreateLineageEventRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.datacatalog.lineage.v1.CreateLineageEventRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.datacatalog.lineage.v1.CreateLineageEventRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.datacatalog.lineage.v1.CreateLineageEventRequest
      parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.datacatalog.lineage.v1.CreateLineageEventRequest
      parseDelimitedFrom(
          java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.datacatalog.lineage.v1.CreateLineageEventRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.datacatalog.lineage.v1.CreateLineageEventRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.cloud.datacatalog.lineage.v1.CreateLineageEventRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }

  /**
   *
   *
   * <pre>
   * Request message for
   * [CreateLineageEvent][google.cloud.datacatalog.lineage.v1.CreateLineageEvent].
   * </pre>
   *
   * Protobuf type {@code google.cloud.datacatalog.lineage.v1.CreateLineageEventRequest}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.datacatalog.lineage.v1.CreateLineageEventRequest)
      com.google.cloud.datacatalog.lineage.v1.CreateLineageEventRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.datacatalog.lineage.v1.LineageProto
          .internal_static_google_cloud_datacatalog_lineage_v1_CreateLineageEventRequest_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.datacatalog.lineage.v1.LineageProto
          .internal_static_google_cloud_datacatalog_lineage_v1_CreateLineageEventRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.datacatalog.lineage.v1.CreateLineageEventRequest.class,
              com.google.cloud.datacatalog.lineage.v1.CreateLineageEventRequest.Builder.class);
    }

    // Construct using
    // com.google.cloud.datacatalog.lineage.v1.CreateLineageEventRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    // Eagerly creates nested field builders when the runtime requires it
    // (e.g. for builder-parent change propagation).
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getLineageEventFieldBuilder();
      }
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      parent_ = "";
      lineageEvent_ = null;
      if (lineageEventBuilder_ != null) {
        lineageEventBuilder_.dispose();
        lineageEventBuilder_ = null;
      }
      requestId_ = "";
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.datacatalog.lineage.v1.LineageProto
          .internal_static_google_cloud_datacatalog_lineage_v1_CreateLineageEventRequest_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.datacatalog.lineage.v1.CreateLineageEventRequest
        getDefaultInstanceForType() {
      return com.google.cloud.datacatalog.lineage.v1.CreateLineageEventRequest.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.datacatalog.lineage.v1.CreateLineageEventRequest build() {
      com.google.cloud.datacatalog.lineage.v1.CreateLineageEventRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.datacatalog.lineage.v1.CreateLineageEventRequest buildPartial() {
      com.google.cloud.datacatalog.lineage.v1.CreateLineageEventRequest result =
          new com.google.cloud.datacatalog.lineage.v1.CreateLineageEventRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies fields whose builder bits are set into the result message, and maps
    // the builder's lineage_event bit (0x2) onto the message's presence bit (0x1).
    private void buildPartial0(
        com.google.cloud.datacatalog.lineage.v1.CreateLineageEventRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.parent_ = parent_;
      }
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.lineageEvent_ =
            lineageEventBuilder_ == null ? lineageEvent_ : lineageEventBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.requestId_ = requestId_;
      }
      result.bitField0_ |= to_bitField0_;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.datacatalog.lineage.v1.CreateLineageEventRequest) {
        return mergeFrom((com.google.cloud.datacatalog.lineage.v1.CreateLineageEventRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Field-wise merge: non-default scalars and set messages in `other` overwrite/merge here.
    public Builder mergeFrom(
        com.google.cloud.datacatalog.lineage.v1.CreateLineageEventRequest other) {
      if (other
          == com.google.cloud.datacatalog.lineage.v1.CreateLineageEventRequest.getDefaultInstance())
        return this;
      if (!other.getParent().isEmpty()) {
        parent_ = other.parent_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (other.hasLineageEvent()) {
        mergeLineageEvent(other.getLineageEvent());
      }
      if (!other.getRequestId().isEmpty()) {
        requestId_ = other.requestId_;
        bitField0_ |= 0x00000004;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    // Wire-format parse loop: dispatches on tag (field number << 3 | wire type);
    // unknown tags are preserved via parseUnknownField.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                parent_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                input.readMessage(getLineageEventFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            case 26:
              {
                requestId_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    // Builder-side presence bits: 0x1 parent, 0x2 lineage_event, 0x4 request_id.
    private int bitField0_;

    private java.lang.Object parent_ = "";

    /**
     *
     *
     * <pre>
     * Required. The name of the run that should own the lineage event.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The parent.
     */
    public java.lang.String getParent() {
      java.lang.Object ref = parent_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        parent_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Required. The name of the run that should own the lineage event.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The bytes for parent.
     */
    public com.google.protobuf.ByteString getParentBytes() {
      java.lang.Object ref = parent_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        parent_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Required. The name of the run that should own the lineage event.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The parent to set.
     * @return This builder for chaining.
     */
    public Builder setParent(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. The name of the run that should own the lineage event.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearParent() {
      parent_ = getDefaultInstance().getParent();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. The name of the run that should own the lineage event.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The bytes for parent to set.
     * @return This builder for chaining.
     */
    public Builder setParentBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    private com.google.cloud.datacatalog.lineage.v1.LineageEvent lineageEvent_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.datacatalog.lineage.v1.LineageEvent,
            com.google.cloud.datacatalog.lineage.v1.LineageEvent.Builder,
            com.google.cloud.datacatalog.lineage.v1.LineageEventOrBuilder>
        lineageEventBuilder_;

    /**
     *
     *
     * <pre>
     * Required. The lineage event to create.
     * </pre>
     *
     * <code>
     * .google.cloud.datacatalog.lineage.v1.LineageEvent lineage_event = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return Whether the lineageEvent field is set.
     */
    public boolean hasLineageEvent() {
      return ((bitField0_ & 0x00000002) != 0);
    }

    /**
     *
     *
     * <pre>
     * Required. The lineage event to create.
     * </pre>
     *
     * <code>
     * .google.cloud.datacatalog.lineage.v1.LineageEvent lineage_event = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return The lineageEvent.
     */
    public com.google.cloud.datacatalog.lineage.v1.LineageEvent getLineageEvent() {
      if (lineageEventBuilder_ == null) {
        return lineageEvent_ == null
            ? com.google.cloud.datacatalog.lineage.v1.LineageEvent.getDefaultInstance()
            : lineageEvent_;
      } else {
        return lineageEventBuilder_.getMessage();
      }
    }

    /**
     *
     *
     * <pre>
     * Required. The lineage event to create.
     * </pre>
     *
     * <code>
     * .google.cloud.datacatalog.lineage.v1.LineageEvent lineage_event = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setLineageEvent(com.google.cloud.datacatalog.lineage.v1.LineageEvent value) {
      if (lineageEventBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        lineageEvent_ = value;
      } else {
        lineageEventBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. The lineage event to create.
     * </pre>
     *
     * <code>
     * .google.cloud.datacatalog.lineage.v1.LineageEvent lineage_event = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setLineageEvent(
        com.google.cloud.datacatalog.lineage.v1.LineageEvent.Builder builderForValue) {
      if (lineageEventBuilder_ == null) {
        lineageEvent_ = builderForValue.build();
      } else {
        lineageEventBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. The lineage event to create.
     * </pre>
     *
     * <code>
     * .google.cloud.datacatalog.lineage.v1.LineageEvent lineage_event = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder mergeLineageEvent(com.google.cloud.datacatalog.lineage.v1.LineageEvent value) {
      if (lineageEventBuilder_ == null) {
        // Merge into an existing non-default value; otherwise just take `value`.
        if (((bitField0_ & 0x00000002) != 0)
            && lineageEvent_ != null
            && lineageEvent_
                != com.google.cloud.datacatalog.lineage.v1.LineageEvent.getDefaultInstance()) {
          getLineageEventBuilder().mergeFrom(value);
        } else {
          lineageEvent_ = value;
        }
      } else {
        lineageEventBuilder_.mergeFrom(value);
      }
      if (lineageEvent_ != null) {
        bitField0_ |= 0x00000002;
        onChanged();
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. The lineage event to create.
     * </pre>
     *
     * <code>
     * .google.cloud.datacatalog.lineage.v1.LineageEvent lineage_event = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder clearLineageEvent() {
      bitField0_ = (bitField0_ & ~0x00000002);
      lineageEvent_ = null;
      if (lineageEventBuilder_ != null) {
        lineageEventBuilder_.dispose();
        lineageEventBuilder_ = null;
      }
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. The lineage event to create.
     * </pre>
     *
     * <code>
     * .google.cloud.datacatalog.lineage.v1.LineageEvent lineage_event = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.datacatalog.lineage.v1.LineageEvent.Builder getLineageEventBuilder() {
      bitField0_ |= 0x00000002;
      onChanged();
      return getLineageEventFieldBuilder().getBuilder();
    }

    /**
     *
     *
     * <pre>
     * Required. The lineage event to create.
     * </pre>
     *
     * <code>
     * .google.cloud.datacatalog.lineage.v1.LineageEvent lineage_event = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.datacatalog.lineage.v1.LineageEventOrBuilder
        getLineageEventOrBuilder() {
      if (lineageEventBuilder_ != null) {
        return lineageEventBuilder_.getMessageOrBuilder();
      } else {
        return lineageEvent_ == null
            ? com.google.cloud.datacatalog.lineage.v1.LineageEvent.getDefaultInstance()
            : lineageEvent_;
      }
    }

    /**
     *
     *
     * <pre>
     * Required. The lineage event to create.
     * </pre>
     *
     * <code>
     * .google.cloud.datacatalog.lineage.v1.LineageEvent lineage_event = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.datacatalog.lineage.v1.LineageEvent,
            com.google.cloud.datacatalog.lineage.v1.LineageEvent.Builder,
            com.google.cloud.datacatalog.lineage.v1.LineageEventOrBuilder>
        getLineageEventFieldBuilder() {
      if (lineageEventBuilder_ == null) {
        // Lazily create the nested builder; ownership of the field moves into it.
        lineageEventBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.datacatalog.lineage.v1.LineageEvent,
                com.google.cloud.datacatalog.lineage.v1.LineageEvent.Builder,
                com.google.cloud.datacatalog.lineage.v1.LineageEventOrBuilder>(
                getLineageEvent(), getParentForChildren(), isClean());
        lineageEvent_ = null;
      }
      return lineageEventBuilder_;
    }

    private java.lang.Object requestId_ = "";

    /**
     *
     *
     * <pre>
     * A unique identifier for this request. Restricted to 36 ASCII characters.
     * A random UUID is recommended. This request is idempotent only if a
     * `request_id` is provided.
     * </pre>
     *
     * <code>string request_id = 3;</code>
     *
     * @return The requestId.
     */
    public java.lang.String getRequestId() {
      java.lang.Object ref = requestId_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        requestId_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * A unique identifier for this request. Restricted to 36 ASCII characters.
     * A random UUID is recommended. This request is idempotent only if a
     * `request_id` is provided.
     * </pre>
     *
     * <code>string request_id = 3;</code>
     *
     * @return The bytes for requestId.
     */
    public com.google.protobuf.ByteString getRequestIdBytes() {
      java.lang.Object ref = requestId_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        requestId_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * A unique identifier for this request. Restricted to 36 ASCII characters.
     * A random UUID is recommended. This request is idempotent only if a
     * `request_id` is provided.
     * </pre>
     *
     * <code>string request_id = 3;</code>
     *
     * @param value The requestId to set.
     * @return This builder for chaining.
     */
    public Builder setRequestId(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      requestId_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * A unique identifier for this request. Restricted to 36 ASCII characters.
     * A random UUID is recommended. This request is idempotent only if a
     * `request_id` is provided.
     * </pre>
     *
     * <code>string request_id = 3;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearRequestId() {
      requestId_ = getDefaultInstance().getRequestId();
      bitField0_ = (bitField0_ & ~0x00000004);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * A unique identifier for this request. Restricted to 36 ASCII characters.
     * A random UUID is recommended. This request is idempotent only if a
     * `request_id` is provided.
     * </pre>
     *
     * <code>string request_id = 3;</code>
     *
     * @param value The bytes for requestId to set.
     * @return This builder for chaining.
     */
    public Builder setRequestIdBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      requestId_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.datacatalog.lineage.v1.CreateLineageEventRequest)
  }

  // @@protoc_insertion_point(class_scope:google.cloud.datacatalog.lineage.v1.CreateLineageEventRequest)
  private static final com.google.cloud.datacatalog.lineage.v1.CreateLineageEventRequest
      DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.datacatalog.lineage.v1.CreateLineageEventRequest();
  }

  public static com.google.cloud.datacatalog.lineage.v1.CreateLineageEventRequest
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Parser delegates to Builder.mergeFrom; on failure it attaches the partially
  // parsed message to the thrown InvalidProtocolBufferException.
  private static final com.google.protobuf.Parser<CreateLineageEventRequest> PARSER =
      new com.google.protobuf.AbstractParser<CreateLineageEventRequest>() {
        @java.lang.Override
        public CreateLineageEventRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<CreateLineageEventRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<CreateLineageEventRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.datacatalog.lineage.v1.CreateLineageEventRequest
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/compute/v1/compute.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.compute.v1;
/**
 *
 *
 * <pre>
 * A request message for Firewalls.Delete. See the method description for details.
 * </pre>
 *
 * Protobuf type {@code google.cloud.compute.v1.DeleteFirewallRequest}
 */
public final class DeleteFirewallRequest extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.compute.v1.DeleteFirewallRequest)
    DeleteFirewallRequestOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use DeleteFirewallRequest.newBuilder() to construct.
  private DeleteFirewallRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private DeleteFirewallRequest() {
    firewall_ = "";
    project_ = "";
    requestId_ = "";
  }
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new DeleteFirewallRequest();
  }
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.compute.v1.Compute
        .internal_static_google_cloud_compute_v1_DeleteFirewallRequest_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.compute.v1.Compute
        .internal_static_google_cloud_compute_v1_DeleteFirewallRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.compute.v1.DeleteFirewallRequest.class,
            com.google.cloud.compute.v1.DeleteFirewallRequest.Builder.class);
  }
  // Presence bits for the message. Only request_id has explicit presence
  // (proto3 `optional`): bit 0x00000001 => request_id is set. The two
  // REQUIRED string fields (firewall, project) use proto3 implicit presence
  // (non-empty == present) and carry no bit here.
  private int bitField0_;
  public static final int FIREWALL_FIELD_NUMBER = 511016192;
  // firewall_ holds either a java.lang.String or a com.google.protobuf.ByteString.
  // It is decoded lazily: parsed bytes stay a ByteString until the first getter
  // call caches the UTF-8-decoded String back into the field.
  @SuppressWarnings("serial")
  private volatile java.lang.Object firewall_ = "";
  /**
   *
   *
   * <pre>
   * Name of the firewall rule to delete.
   * </pre>
   *
   * <code>string firewall = 511016192 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The firewall.
   */
  @java.lang.Override
  public java.lang.String getFirewall() {
    java.lang.Object ref = firewall_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      firewall_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Name of the firewall rule to delete.
   * </pre>
   *
   * <code>string firewall = 511016192 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The bytes for firewall.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getFirewallBytes() {
    java.lang.Object ref = firewall_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      firewall_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int PROJECT_FIELD_NUMBER = 227560217;
  // Same lazy String/ByteString caching scheme as firewall_.
  @SuppressWarnings("serial")
  private volatile java.lang.Object project_ = "";
  /**
   *
   *
   * <pre>
   * Project ID for this request.
   * </pre>
   *
   * <code>
   * string project = 227560217 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "project"];
   * </code>
   *
   * @return The project.
   */
  @java.lang.Override
  public java.lang.String getProject() {
    java.lang.Object ref = project_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      project_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Project ID for this request.
   * </pre>
   *
   * <code>
   * string project = 227560217 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "project"];
   * </code>
   *
   * @return The bytes for project.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getProjectBytes() {
    java.lang.Object ref = project_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      project_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int REQUEST_ID_FIELD_NUMBER = 37109963;
  // Same lazy String/ByteString caching scheme as firewall_; presence is
  // tracked separately via bitField0_ bit 0x00000001.
  @SuppressWarnings("serial")
  private volatile java.lang.Object requestId_ = "";
  /**
   *
   *
   * <pre>
   * An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. The request ID must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000).
   * </pre>
   *
   * <code>optional string request_id = 37109963;</code>
   *
   * @return Whether the requestId field is set.
   */
  @java.lang.Override
  public boolean hasRequestId() {
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   *
   *
   * <pre>
   * An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. The request ID must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000).
   * </pre>
   *
   * <code>optional string request_id = 37109963;</code>
   *
   * @return The requestId.
   */
  @java.lang.Override
  public java.lang.String getRequestId() {
    java.lang.Object ref = requestId_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      requestId_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. The request ID must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000).
   * </pre>
   *
   * <code>optional string request_id = 37109963;</code>
   *
   * @return The bytes for requestId.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getRequestIdBytes() {
    java.lang.Object ref = requestId_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      requestId_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // Memoized isInitialized result: -1 = not computed, 1 = true, 0 = false.
  // This message has no required proto2 fields or submessages, so it is
  // always initialized.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Fields are written in ascending field-number order:
    // request_id (37109963), project (227560217), firewall (511016192).
    // request_id is written only when its presence bit is set; the proto3
    // implicit-presence strings are skipped when empty.
    if (((bitField0_ & 0x00000001) != 0)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 37109963, requestId_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(project_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 227560217, project_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(firewall_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 511016192, firewall_);
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // Size is memoized in GeneratedMessageV3.memoizedSize; -1 means "not
    // yet computed". The field checks mirror writeTo exactly.
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(37109963, requestId_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(project_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(227560217, project_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(firewall_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(511016192, firewall_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.compute.v1.DeleteFirewallRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.compute.v1.DeleteFirewallRequest other =
        (com.google.cloud.compute.v1.DeleteFirewallRequest) obj;
    if (!getFirewall().equals(other.getFirewall())) return false;
    if (!getProject().equals(other.getProject())) return false;
    // For the optional field, presence must match before values are compared.
    if (hasRequestId() != other.hasRequestId()) return false;
    if (hasRequestId()) {
      if (!getRequestId().equals(other.getRequestId())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    // Memoized in AbstractMessage.memoizedHashCode; 0 is the "not computed"
    // sentinel (the mixing scheme below cannot produce 0 for this message).
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + FIREWALL_FIELD_NUMBER;
    hash = (53 * hash) + getFirewall().hashCode();
    hash = (37 * hash) + PROJECT_FIELD_NUMBER;
    hash = (53 * hash) + getProject().hashCode();
    if (hasRequestId()) {
      hash = (37 * hash) + REQUEST_ID_FIELD_NUMBER;
      hash = (53 * hash) + getRequestId().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  public static com.google.cloud.compute.v1.DeleteFirewallRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.compute.v1.DeleteFirewallRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.compute.v1.DeleteFirewallRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.compute.v1.DeleteFirewallRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.compute.v1.DeleteFirewallRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.compute.v1.DeleteFirewallRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.compute.v1.DeleteFirewallRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.compute.v1.DeleteFirewallRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.compute.v1.DeleteFirewallRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.compute.v1.DeleteFirewallRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.compute.v1.DeleteFirewallRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.compute.v1.DeleteFirewallRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.cloud.compute.v1.DeleteFirewallRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * A request message for Firewalls.Delete. See the method description for details.
   * </pre>
   *
   * Protobuf type {@code google.cloud.compute.v1.DeleteFirewallRequest}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.compute.v1.DeleteFirewallRequest)
      com.google.cloud.compute.v1.DeleteFirewallRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.compute.v1.Compute
          .internal_static_google_cloud_compute_v1_DeleteFirewallRequest_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.compute.v1.Compute
          .internal_static_google_cloud_compute_v1_DeleteFirewallRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.compute.v1.DeleteFirewallRequest.class,
              com.google.cloud.compute.v1.DeleteFirewallRequest.Builder.class);
    }
    // Construct using com.google.cloud.compute.v1.DeleteFirewallRequest.newBuilder()
    private Builder() {}
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      firewall_ = "";
      project_ = "";
      requestId_ = "";
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.compute.v1.Compute
          .internal_static_google_cloud_compute_v1_DeleteFirewallRequest_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.compute.v1.DeleteFirewallRequest getDefaultInstanceForType() {
      return com.google.cloud.compute.v1.DeleteFirewallRequest.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.cloud.compute.v1.DeleteFirewallRequest build() {
      com.google.cloud.compute.v1.DeleteFirewallRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.compute.v1.DeleteFirewallRequest buildPartial() {
      com.google.cloud.compute.v1.DeleteFirewallRequest result =
          new com.google.cloud.compute.v1.DeleteFirewallRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    private void buildPartial0(com.google.cloud.compute.v1.DeleteFirewallRequest result) {
      // Builder bits: 0x1 = firewall, 0x2 = project, 0x4 = request_id.
      // Only request_id has explicit presence in the message, so builder bit
      // 0x4 is translated to message bit 0x1; the other two just copy values.
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.firewall_ = firewall_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.project_ = project_;
      }
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.requestId_ = requestId_;
        to_bitField0_ |= 0x00000001;
      }
      result.bitField0_ |= to_bitField0_;
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.compute.v1.DeleteFirewallRequest) {
        return mergeFrom((com.google.cloud.compute.v1.DeleteFirewallRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    public Builder mergeFrom(com.google.cloud.compute.v1.DeleteFirewallRequest other) {
      if (other == com.google.cloud.compute.v1.DeleteFirewallRequest.getDefaultInstance())
        return this;
      // Implicit-presence strings merge only when non-empty; request_id
      // merges whenever its presence bit is set on the other message.
      if (!other.getFirewall().isEmpty()) {
        firewall_ = other.firewall_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (!other.getProject().isEmpty()) {
        project_ = other.project_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (other.hasRequestId()) {
        requestId_ = other.requestId_;
        bitField0_ |= 0x00000004;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          // Tags below are precomputed as (field_number << 3) | wire_type,
          // with wire type 2 (length-delimited) for all three string fields.
          switch (tag) {
            case 0:
              done = true;
              break;
            // 296879706 = 37109963 << 3 | 2 (request_id)
            case 296879706:
              {
                requestId_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000004;
                break;
              } // case 296879706
            // 1820481738 = 227560217 << 3 | 2 (project)
            case 1820481738:
              {
                project_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 1820481738
            // -206837758 = 511016192 << 3 | 2 (firewall); the shift overflows
            // the signed 32-bit int, so the case constant is negative.
            case -206837758:
              {
                firewall_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case -206837758
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    private int bitField0_;
    private java.lang.Object firewall_ = "";
    /**
     *
     *
     * <pre>
     * Name of the firewall rule to delete.
     * </pre>
     *
     * <code>string firewall = 511016192 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return The firewall.
     */
    public java.lang.String getFirewall() {
      java.lang.Object ref = firewall_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        firewall_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Name of the firewall rule to delete.
     * </pre>
     *
     * <code>string firewall = 511016192 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return The bytes for firewall.
     */
    public com.google.protobuf.ByteString getFirewallBytes() {
      java.lang.Object ref = firewall_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        firewall_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Name of the firewall rule to delete.
     * </pre>
     *
     * <code>string firewall = 511016192 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @param value The firewall to set.
     * @return This builder for chaining.
     */
    public Builder setFirewall(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      firewall_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Name of the firewall rule to delete.
     * </pre>
     *
     * <code>string firewall = 511016192 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearFirewall() {
      firewall_ = getDefaultInstance().getFirewall();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Name of the firewall rule to delete.
     * </pre>
     *
     * <code>string firewall = 511016192 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @param value The bytes for firewall to set.
     * @return This builder for chaining.
     */
    public Builder setFirewallBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      firewall_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    private java.lang.Object project_ = "";
    /**
     *
     *
     * <pre>
     * Project ID for this request.
     * </pre>
     *
     * <code>
     * string project = 227560217 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "project"];
     * </code>
     *
     * @return The project.
     */
    public java.lang.String getProject() {
      java.lang.Object ref = project_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        project_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Project ID for this request.
     * </pre>
     *
     * <code>
     * string project = 227560217 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "project"];
     * </code>
     *
     * @return The bytes for project.
     */
    public com.google.protobuf.ByteString getProjectBytes() {
      java.lang.Object ref = project_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        project_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Project ID for this request.
     * </pre>
     *
     * <code>
     * string project = 227560217 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "project"];
     * </code>
     *
     * @param value The project to set.
     * @return This builder for chaining.
     */
    public Builder setProject(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      project_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Project ID for this request.
     * </pre>
     *
     * <code>
     * string project = 227560217 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "project"];
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearProject() {
      project_ = getDefaultInstance().getProject();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Project ID for this request.
     * </pre>
     *
     * <code>
     * string project = 227560217 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "project"];
     * </code>
     *
     * @param value The bytes for project to set.
     * @return This builder for chaining.
     */
    public Builder setProjectBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      project_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    private java.lang.Object requestId_ = "";
    /**
     *
     *
     * <pre>
     * An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. The request ID must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000).
     * </pre>
     *
     * <code>optional string request_id = 37109963;</code>
     *
     * @return Whether the requestId field is set.
     */
    public boolean hasRequestId() {
      return ((bitField0_ & 0x00000004) != 0);
    }
    /**
     *
     *
     * <pre>
     * An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. The request ID must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000).
     * </pre>
     *
     * <code>optional string request_id = 37109963;</code>
     *
     * @return The requestId.
     */
    public java.lang.String getRequestId() {
      java.lang.Object ref = requestId_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        requestId_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. The request ID must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000).
     * </pre>
     *
     * <code>optional string request_id = 37109963;</code>
     *
     * @return The bytes for requestId.
     */
    public com.google.protobuf.ByteString getRequestIdBytes() {
      java.lang.Object ref = requestId_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        requestId_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. The request ID must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000).
     * </pre>
     *
     * <code>optional string request_id = 37109963;</code>
     *
     * @param value The requestId to set.
     * @return This builder for chaining.
     */
    public Builder setRequestId(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      requestId_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. The request ID must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000).
     * </pre>
     *
     * <code>optional string request_id = 37109963;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearRequestId() {
      requestId_ = getDefaultInstance().getRequestId();
      bitField0_ = (bitField0_ & ~0x00000004);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. The request ID must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000).
     * </pre>
     *
     * <code>optional string request_id = 37109963;</code>
     *
     * @param value The bytes for requestId to set.
     * @return This builder for chaining.
     */
    public Builder setRequestIdBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      requestId_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.cloud.compute.v1.DeleteFirewallRequest)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.compute.v1.DeleteFirewallRequest)
  private static final com.google.cloud.compute.v1.DeleteFirewallRequest DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.compute.v1.DeleteFirewallRequest();
  }
  public static com.google.cloud.compute.v1.DeleteFirewallRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  private static final com.google.protobuf.Parser<DeleteFirewallRequest> PARSER =
      new com.google.protobuf.AbstractParser<DeleteFirewallRequest>() {
        @java.lang.Override
        public DeleteFirewallRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            // Attach the partially parsed message so callers can inspect it.
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<DeleteFirewallRequest> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<DeleteFirewallRequest> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.compute.v1.DeleteFirewallRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/incubator-retired-wave | 37,862 | wave/src/main/java/org/waveprotocol/wave/model/document/util/DocHelper.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.waveprotocol.wave.model.document.util;
import org.waveprotocol.wave.model.document.MutableDocument;
import org.waveprotocol.wave.model.document.ReadableDocument;
import org.waveprotocol.wave.model.document.ReadableWDocument;
import org.waveprotocol.wave.model.document.indexed.LocationMapper;
import org.waveprotocol.wave.model.document.operation.Attributes;
import org.waveprotocol.wave.model.document.raw.TextNodeOrganiser;
import org.waveprotocol.wave.model.document.raw.impl.Element;
import org.waveprotocol.wave.model.document.raw.impl.Node;
import org.waveprotocol.wave.model.document.raw.impl.Text;
import org.waveprotocol.wave.model.util.CollectionUtils;
import org.waveprotocol.wave.model.util.IdentityMap;
import org.waveprotocol.wave.model.util.Preconditions;
/**
* Miscellaneous document helper functions
*
* @author danilatos@google.com (Daniel Danilatos)
*/
//
// DO NOT JUST PUT ANY ARBITRARY MISCELLANEOUS STUFF IN HERE
//
// (Please think of the big picture - there is too much overlap
// of partially useful utility methods)
//
// If in doubt, send CL to dan
//
// ALL NEW METHODS MUST BE 100% THOROUGHLY UNIT TESTED
//
public class DocHelper {

  /**
   * Expectations for top-level element existence, used by
   * {@link #getOrCreateFirstTopLevelElement(MutableDocument, String, Expectation)}.
   * Since the code that uses this does its own interpretation, it is a
   * requirement that the semantic intersection of any two values is empty.
   */
  private enum Expectation {
    /** No expectation: reuse the element if present, create it if absent. */
    NONE,
    /** The element must be absent (it will be created). */
    ABSENT,
    /** The element must already be present. */
    PRESENT
  }

  /**
   * "Call" this method so the compiler can help us find code that will break
   * when we make the root an implicit object, and location zero refers to its
   * first child.
   *
   * A lot of test cases will need 1 subtracted from their use of hard coded
   * integer location values
   */
  public static void noteCodeThatWillBreakWithMultipleRoots() {
  }

  /** Mutable (node, offset) pair used as an out-parameter by the getText helpers. */
  private static class NodeOffset<N> {
    /**
     * If the node is an element, it means "node after" and the offset is
     * meaningless. Otherwise, the NodeOffset is the same as an inTextNode point.
     */
    N node;
    int offset;
  }

  /**
   * Action that can be applied to a node.
   *
   * @param <N> Node
   */
  public interface NodeAction<N> {
    void apply(N node);
  }

  /** Static utility class; never instantiated. */
  private DocHelper() { }
/**
 * Checks whether a location has some text immediately to its left.
 *
 * @param doc document to inspect
 * @param mapper maps integer locations to points within {@code doc}
 * @param location the location to test
 * @return true if text data precedes the given location
 */
public static <N, E extends N, T extends N> boolean textPrecedes(
    ReadableDocument<N, E, T> doc, LocationMapper<N> mapper, int location) {
  Point<N> point = mapper.locate(location);
  if (!point.isInTextNode()) {
    // Element point: text precedes iff the node just before it is a text node.
    return doc.asText(Point.nodeBefore(doc, point.asElementPoint())) != null;
  }
  // Text point: either strictly inside the text node's data...
  if (point.getTextOffset() > 0) {
    return true;
  }
  // ...or at its start, with a text sibling immediately before it.
  return doc.asText(doc.getPreviousSibling(point.getContainer())) != null;
}
/**
 * Checks whether a location has some text immediately to its right.
 *
 * @param mapper maps integer locations to points
 * @param location the location to test
 * @return true if text data follows the given location
 */
public static <N, E extends N, T extends N> boolean textFollows(
    LocationMapper<N> mapper, int location) {
  // Locating points always biases to the right, so this case is easy
  return mapper.locate(location).asTextPoint() != null;
}

/**
 * Returns the first element in the doc with the given tag name. The root
 * element will never match.
 *
 * @param doc document to look in
 * @param tagName tag name to find
 * @return the first element in the doc with tagName, or null if none exist
 */
public static <N, E extends N> E getElementWithTagName(
    ReadableDocument<N, E, ?> doc, String tagName) {
  // Delegate to the subtree variant, searching below the document root.
  return getElementWithTagName(doc, tagName, doc.getDocumentElement());
}
/**
 * Returns the first element in a subtree with the given tag name. The subtree
 * root itself is never a match.
 *
 * @param doc document to look in
 * @param tagName tag name to find
 * @param subtreeRoot root of the subtree to search (exclusive)
 * @return the first matching element in depth-first order, or null if none exist
 */
public static <N, E extends N> E getElementWithTagName(ReadableDocument<N, E, ?> doc,
    String tagName, E subtreeRoot) {
  // Depth-first walk that enters elements and stops once the subtree is exhausted.
  for (N current = DocHelper.getNextNodeDepthFirst(doc, subtreeRoot, subtreeRoot, true);
      current != null;
      current = DocHelper.getNextNodeDepthFirst(doc, current, subtreeRoot, true)) {
    E candidate = doc.asElement(current);
    if (candidate != null && doc.getTagName(candidate).equals(tagName)) {
      return candidate;
    }
  }
  return null;
}
/**
 * Returns the last element in the doc with the given tag name. The root
 * element will never match.
 *
 * @param doc document to look in
 * @param tagName tag name to find
 * @return the last element in the doc with tagName, or null if none exist
 */
public static <N, E extends N> E getLastElementWithTagName(
    ReadableDocument<N, E, ?> doc, String tagName) {
  return getLastElementWithTagName(doc, tagName, doc.getDocumentElement());
}

/**
 * Returns the last element in a subtree with the given tag name. The subtree
 * root will never match.
 *
 * @param doc document to look in
 * @param tagName tag name to find
 * @param subtreeRoot of the subtree to search (exclusive)
 * @return the last element in the subtree with tagName, or null if none exist
 */
public static <N, E extends N> E getLastElementWithTagName(ReadableDocument<N, E, ?> doc,
    String tagName, E subtreeRoot) {
  // Same traversal as getElementWithTagName, but walking backwards.
  N node = DocHelper.getPrevNodeDepthFirst(doc, subtreeRoot, subtreeRoot, true);
  while (node != null) {
    E element = doc.asElement(node);
    if (element != null) {
      if (doc.getTagName(element).equals(tagName)) {
        return element;
      }
    }
    node = DocHelper.getPrevNodeDepthFirst(doc, node, subtreeRoot, true);
  }
  return null;
}

/**
 * Get the text within the given element.
 */
public static <N, E extends N, T extends N> String getText(ReadableWDocument<N, E, T> doc,
    E element) {
  // A ReadableWDocument is its own location mapper.
  return getText(doc, doc, element);
}

/**
 * Get the text within the given element.
 *
 * @param doc document to read from
 * @param mapper maps points in {@code doc} to integer locations
 * @param element element whose textual content is wanted
 */
public static <N, E extends N, T extends N> String getText(ReadableDocument<N, E, T> doc,
    LocationMapper<N> mapper, E element) {
  // The range is the interior of the element: from just inside its start tag
  // to just inside its end tag.
  int start = mapper.getLocation(Point.start(doc, element));
  int end = mapper.getLocation(Point.<N>end(element));
  return DocHelper.getText(doc, mapper, start, end);
}

/**
 * Shortcut to get the text for an element with a specific tag name.
 * @see DocHelper#getElementWithTagName(ReadableDocument, String)
 * @see DocHelper#getText(ReadableDocument, LocationMapper, Object)
 */
public static <N> String getTextForElement(
    ReadableWDocument<N, ?, ?> doc, String tagName) {
  return getTextForElement(doc, doc, tagName);
}

/**
 * Shortcut to get the text for an element with a specific tag name.
 * Returns null if no element with that tag name exists.
 * @see DocHelper#getElementWithTagName(ReadableDocument, String)
 * @see DocHelper#getText(ReadableDocument, LocationMapper, Object)
 */
public static <N, E extends N, T extends N> String getTextForElement(
    ReadableDocument<N, E, T> doc, LocationMapper<N> mapper, String tagName) {
  E element = getElementWithTagName(doc, tagName);
  if (element != null) {
    return getText(doc, mapper, element);
  }
  return null;
}

/**
 * Variant that accepts an indexed document instead
 * @see #getText(ReadableDocument, LocationMapper, int, int)
 */
public static <N> String getText(ReadableWDocument<N, ?, ?> doc, int start, int end) {
  return getText(doc, doc, start, end);
}

/**
 * Gets text between two locations, using a mapper to convert to points.
 *
 * @throws IndexOutOfBoundsException if the range is invalid for the document size
 * @see #getText(ReadableDocument, Point, Point)
 */
public static <N, E extends N, T extends N> String getText(
    ReadableDocument<N, E, T> doc, LocationMapper<N> mapper,
    int start, int end) {
  Preconditions.checkPositionIndexes(start, end, mapper.size());
  Point<N> startPoint = mapper.locate(start);
  Point<N> endPoint = mapper.locate(end);
  return getText(doc, startPoint, endPoint);
}
/**
 * Get the text between a given range.
 *
 * Concatenates the data of every text node (or partial text node at the
 * endpoints) between the two points, in depth-first order.
 *
 * NOTE(review): assumes {@code startPoint} does not come after
 * {@code endPoint}; behaviour for a reversed range is undefined — confirm
 * with callers.
 */
public static <N, E extends N, T extends N> String getText(
    ReadableDocument<N, E, T> doc, Point<N> startPoint, Point<N> endPoint) {
  // Resolve both points to (node, offset) pairs via the shared out-parameter.
  NodeOffset<N> output = new NodeOffset<N>();
  getNodeAfterOutwards(doc, startPoint, output);
  N startNode = output.node;
  int startOffset = output.offset;
  getNodeAfterOutwards(doc, endPoint, output);
  N endNode = output.node;
  int endOffset = output.offset;
  if (startNode == null) {
    // Start point was at the very end of the document: nothing to collect.
    return "";
  }
  T text = doc.asText(startNode);
  if (doc.isSameNode(startNode, endNode)) {
    // Both endpoints fall in the same node: slice its data directly.
    return text == null ? "" : doc.getData(text).substring(startOffset, endOffset);
  }
  StringBuilder str = new StringBuilder();
  if (text != null) {
    str.append(doc.getData(text).substring(startOffset));
  }
  N node = getNextNodeDepthFirst(doc, startNode, null, true);
  // NOTE(review): this loop uses reference identity (!=) while the check above
  // uses doc.isSameNode(); if an implementation hands out distinct wrapper
  // objects for the same node this could walk past endNode — verify against
  // the document implementations in use.
  while (node != endNode) {
    text = doc.asText(node);
    if (text != null) {
      str.append(doc.getData(text));
    }
    node = getNextNodeDepthFirst(doc, node, null, true);
  }
  text = doc.asText(node);
  if (text != null) {
    str.append(doc.getData(text).substring(0, endOffset));
  }
  return str.toString();
}
/**
 * Step out of end tags, so we get something that is either in a text node,
 * or the node after our point in a pre-order traversal.
 *
 * Results are written into {@code output}; {@code output.node} is null when
 * the point is at the very end of the document.
 */
private static <N, E extends N, T extends N> void getNodeAfterOutwards(
    ReadableDocument<N, E, T> doc, Point<N> point, NodeOffset<N> output) {
  N node;
  int startOffset;
  if (point.isInTextNode()) {
    node = point.getContainer();
    startOffset = point.getTextOffset();
  } else {
    node = point.getNodeAfter();
    if (node == null) {
      // Point is at the end of its container: climb until an ancestor has a
      // next sibling, i.e. step out through any number of end tags.
      N parent = point.getContainer();
      while (parent != null) {
        node = doc.getNextSibling(parent);
        if (node != null) {
          break;
        }
        parent = doc.getParentElement(parent);
      }
    }
    startOffset = 0;
  }
  output.node = node;
  output.offset = startOffset;
}

/**
 * Get the next node in a depth first traversal.
 *
 * TODO(danilatos): Move this somewhere common (and use for better filtered
 * traversals).
 *
 * @param doc The view to use
 * @param start The node to start from
 * @param stopAt If we reach this node, return null. If already in the node,
 *        will only stop while exiting having traversed all its children. If
 *        we start outside it, it will not be entered.
 * @param enter Enter the start node if it is an element (false to skip its
 *        children - only applies to the start node)
 */
public static <N> N getNextNodeDepthFirst(
    ReadableDocument<N, ?, ?> doc, N start, N stopAt, boolean enter) {
  return getNextOrPrevNodeDepthFirst(doc, start, stopAt, enter, true);
}

/**
 * Same as {@link #getNextNodeDepthFirst(ReadableDocument, Object, Object, boolean)},
 * but goes in the other direction
 */
public static <N> N getPrevNodeDepthFirst(
    ReadableDocument<N, ?, ?> doc, N start, N stopAt, boolean enter) {
  return getNextOrPrevNodeDepthFirst(doc, start, stopAt, enter, false);
}

/**
 * Same as {@link #getNextNodeDepthFirst(ReadableDocument, Object, Object, boolean)}
 * and {@link #getPrevNodeDepthFirst(ReadableDocument, Object, Object, boolean)}
 * except direction is parametrised.
 * @param rightwards If true, then go rightwards, otherwise leftwards.
 */
public static <N, E extends N, T extends N> N getNextOrPrevNodeDepthFirst(
    ReadableDocument<N, E, T> doc, N start, N stopAt, boolean enter, boolean rightwards) {
  // Default stopping place is the very top
  if (stopAt == null) {
    stopAt = doc.getDocumentElement();
  }
  // Maybe enter into an element
  N next;
  if (enter) {
    E element = doc.asElement(start);
    if (element != null) {
      // Descend: first child when going right, last child when going left.
      next = rightwards ? doc.getFirstChild(element) : doc.getLastChild(element);
      if (next != null) {
        return next;
      }
    }
  }
  // Go upwards from exiting an element
  while (start != null && !doc.isSameNode(start, stopAt)) {
    next = rightwards ? doc.getNextSibling(start) : doc.getPreviousSibling(start);
    if (doc.isSameNode(next, stopAt)) {
      // We would re-enter the boundary node: the traversal is exhausted.
      return null;
    }
    if (next != null) {
      return next;
    }
    start = doc.getParentElement(start);
  }
  return null;
}
/**
 * Same as {@link #getFilteredPoint(ReadableDocumentView, Point)}, but
 * returns an integer location.
 */
public static <N, E extends N, T extends N> int getFilteredLocation(
    LocationMapper<N> locationMapper, ReadableDocumentView<N, E, T> filteredView,
    Point<N> point) {
  return locationMapper.getLocation(getFilteredPoint(filteredView, point));
}

/**
 * Gets the location of a given point in the DOM, projected into the given
 * filtered view (i.e. expressed only in terms of nodes visible in that view).
 *
 * @param filteredView view to project into
 * @param point point in the full document
 * @return the corresponding point using only visible nodes.
 */
public static <N, E extends N, T extends N> Point<N> getFilteredPoint(
    ReadableDocumentView<N, E, T> filteredView, Point<N> point) {
  filteredView.onBeforeFilter(point);
  if (point.isInTextNode()) {
    N visible;
    visible = filteredView.getVisibleNode(point.getContainer());
    if (visible == point.getContainer()) {
      // The text node itself is visible: the point needs no adjustment.
      return point;
    } else {
      // Skip forward to the next node that is inside the visible ancestor.
      N next = getNextNodeDepthFirst(filteredView, point.getContainer(), visible, false);
      if (next == null) {
        return Point.inElement(visible, null);
      } else {
        return Point.before(filteredView, next);
      }
    }
  } else if (point.getNodeAfter() == null) {
    return getLocationOfNodeEnd(filteredView, point.getContainer());
  } else {
    return getLocationOfBeforeNode(filteredView, point.getNodeAfter());
  }
}

/**
 * Get location of the end of the inside of the given node
 */
private static <N, E extends N, T extends N> Point<N> getLocationOfNodeEnd(
    ReadableDocumentView<N, E, T> doc, N node) {
  assert node != null : "Node is null";
  N parent = doc.getVisibleNode(node);
  assert parent != null : "Parent is null";
  if (parent == node) {
    return Point.end(node);
  }
  // node itself is filtered out: fall forward to the next visible position.
  N next = DocHelper.getNextNodeDepthFirst(doc, node, parent, false);
  if (next == null) {
    return Point.end(parent);
  } else {
    return Point.before(doc, next);
  }
}

/**
 * Get location of the outside of the start of the given node
 */
private static <N, E extends N, T extends N> Point<N> getLocationOfBeforeNode(
    ReadableDocumentView<N, E, T> doc, N node) {
  assert node != doc.getDocumentElement() : "Cannot get location outside of root element";
  N parent = doc.getVisibleNode(node);
  if (parent == node) {
    return Point.before(doc, node);
  }
  assert parent != null;
  // node itself is filtered out: use the next visible node within parent.
  N next = DocHelper.getNextNodeDepthFirst(doc, node, parent, true);
  if (next == null) {
    return Point.end(parent);
  } else {
    return Point.before(doc, next);
  }
}
/**
 * Returns the size, in document locations, that the given node occupies:
 * the data length for a text node, otherwise the difference between the
 * locations just after and at the node.
 *
 * @param doc document the node belongs to
 * @param node node to measure
 */
public static <N, T extends N> int getItemSize(ReadableWDocument<N, ?, T> doc, N node) {
  // Text nodes report their own length directly.
  T asText = doc.asText(node);
  if (asText != null) {
    return doc.getLength(asText);
  }
  N container = doc.getParentElement(node);
  if (container == null) {
    // Requesting size of the document root.
    // TODO(danilatos/anorth) This would change if we have multiple roots.
    noteCodeThatWillBreakWithMultipleRoots();
    return doc.size();
  }
  // Element size = location just past the node minus location of the node.
  N following = doc.getNextSibling(node);
  int locationAfter = (following == null)
      ? doc.getLocation(Point.end(container))
      : doc.getLocation(following);
  return locationAfter - doc.getLocation(node);
}
/**
 * Normalizes a point so that it is biased towards text nodes, and node ends
 * rather than node start.
 *
 * @param <N> node type
 * @param <E> element type
 * @param <T> text node type
 * @param point point to normalize
 * @param doc document the point refers into
 * @return the normalized point (may be the input point itself if no
 *         adjustment applies)
 */
public static <N, E extends N, T extends N> Point<N> normalizePoint(Point<N> point,
    ReadableDocument<N, E, T> doc) {
  N previous = null;
  if (!point.isInTextNode()) {
    previous = Point.nodeBefore(doc, point.asElementPoint());
    // Prefer the start of a following text node over an element point.
    T nodeAfterAsText = doc.asText(point.getNodeAfter());
    if (nodeAfterAsText != null) {
      point = Point.<N>inText(nodeAfterAsText, 0);
    }
  } else if (point.getTextOffset() == 0) {
    previous = doc.getPreviousSibling(point.getContainer());
  }
  // If a text node immediately precedes the point, bias to its end instead.
  T previousAsText = doc.asText(previous);
  if (previous != null && previousAsText != null) {
    point = Point.inText(previous, doc.getLength(previousAsText));
  }
  return point;
}
/**
 * Left-aligns a position in a document, given a view over that document of places to align to.
 * Achieved by traversing the point backwards through the full document until a position in the
 * view is found, then returning a point at that position.
 *
 * @param current The point in the fullDoc to align
 * @param fullDoc Complete document
 * @param important view over the complete document
 * @return The aligned point in the full document (may use nodes not in the view)
 */
public static <N, E extends N, T extends N> Point<N> leftAlign(Point<N> current,
    ReadableDocument<N, E, T> fullDoc, ReadableDocumentView<N, E, T> important) {
  if (current == null || current.isInTextNode()) {
    return current; // assume text nodes are already aligned
  }
  N parent = current.getContainer();
  N at = current.getNodeAfter();
  // calculate the node before the point
  N lastBefore = null;
  if (at == null) {
    lastBefore = fullDoc.getLastChild(parent);
  } else {
    lastBefore = fullDoc.getPreviousSibling(at);
  }
  // nothing before the at node, so move up one level
  N visibleParent = important.getVisibleNode(parent);
  if (lastBefore == null) {
    if (parent == visibleParent) {
      return Point.textOrElementStart(fullDoc, parent);
    }
    lastBefore = parent;
  }
  // and move backwards (starting from right-most child) until we have an important node
  N nodeLast = important.getVisibleNodeLast(lastBefore);
  N lcaVis = nodeLast == null ? lastBefore : nearestCommonAncestor(fullDoc, nodeLast, lastBefore);
  // special case when last visible is a parent - so use visibleParent iff it is a child of lcaVis
  if (isAncestor(fullDoc, lcaVis, visibleParent, false)) {
    return Point.textOrElementStart(fullDoc, visibleParent);
  } else {
    lastBefore = nodeLast;
  }
  // get the child after the node before the new point, then correct the parent in full document.
  at = lastBefore == null ? null : important.getNextSibling(lastBefore);
  if (at != null) {
    parent = fullDoc.getParentElement(at);
  } else if (lastBefore != null) {
    parent = fullDoc.getParentElement(lastBefore);
  }
  return at == null ? Point.end(parent) : Point.before(fullDoc, at);
}
/**
 * Gets the first child element of an element, if there is one.
 *
 * @param doc document accessor
 * @param element parent element
 * @return the first child element of {@code element} if there is one,
 *         otherwise {@code null}.
 */
public static <N, E extends N> E getFirstChildElement(ReadableDocument<N, E, ?> doc, E element) {
  return getNextElementInclusive(doc, doc.getFirstChild(element), true);
}

/**
 * Gets the last child element of an element, if there is one.
 *
 * @param doc document accessor
 * @param element parent element
 * @return the last child element of {@code element} if there is one,
 *         otherwise {@code null}.
 */
public static <N, E extends N> E getLastChildElement(ReadableDocument<N, E, ?> doc, E element) {
  return getNextElementInclusive(doc, doc.getLastChild(element), false);
}

/**
 * Gets the next sibling of an element that is also an element itself.
 *
 * @param doc document accessor
 * @param element an element
 * @return the next element sibling of {@code element} if there is one,
 *         otherwise {@code null}.
 */
public static <N, E extends N> E getNextSiblingElement(ReadableDocument<N, E, ?> doc, E element) {
  return getNextElementInclusive(doc, doc.getNextSibling(element), true);
}

/**
 * @param doc document accessor.
 * @param element a document element.
 * @return The previous element sibling of {@code element} if there is one,
 *         otherwise {@code null}.
 */
public static <N, E extends N> E getPreviousSiblingElement(
    ReadableDocument<N, E, ?> doc, E element) {
  Preconditions.checkNotNull(element, "Previous element for null element is undefined");
  Preconditions.checkNotNull(doc, "Previous element for null document is undefined");
  return getNextElementInclusive(doc, doc.getPreviousSibling(element), false);
}

/**
 * Returns a node as an element if it is one; otherwise, finds the next
 * (or previous) sibling of that node that is an element.
 *
 * @param doc document accessor
 * @param node reference node
 * @param forward true to scan towards next siblings, false towards previous
 * @return the next element in the inclusive sibling chain from {@code node}.
 */
public static <N, E extends N> E getNextElementInclusive(ReadableDocument<N, E, ?> doc, N node,
    boolean forward) {
  E asElement = doc.asElement(node);
  while (node != null && asElement == null) {
    node = forward ? doc.getNextSibling(node) : doc.getPreviousSibling(node);
    asElement = doc.asElement(node);
  }
  return asElement;
}
/**
 * Applies an action to a node, each of its following siblings, and all of
 * their descendants, visiting each node before its children (pre-order).
 *
 * @param doc view for traversing
 * @param node first node of the sibling chain to visit (may be null)
 * @param nodeAction action to apply to each visited node
 */
public static <N, E extends N, T extends N> void traverse(ReadableDocument<N, E, T> doc, N node,
    NodeAction<N> nodeAction) {
  N current = node;
  while (current != null) {
    nodeAction.apply(current);
    // Descend into the children before moving on to the next sibling.
    traverse(doc, doc.getFirstChild(current), nodeAction);
    current = doc.getNextSibling(current);
  }
}
/**
 * Ensures the given point is at a node boundary, possibly splitting a text
 * node in order to do so, in which case a new point is returned.
 *
 * @param point point to adjust
 * @param doc document accessor
 * @param textNodeOrganiser used to split text nodes when required
 * @return a point at the same place as the input point, guaranteed to be at
 *         a node boundary.
 */
public static <N, T extends N> Point.El<N> ensureNodeBoundary(Point<N> point,
    ReadableDocument<N, ?, T> doc, TextNodeOrganiser<T> textNodeOrganiser) {
  Point.Tx<N> textPoint = point.asTextPoint();
  if (textPoint != null) {
    T textNode = doc.asText(textPoint.getContainer());
    N maybeSecond = textNodeOrganiser.splitText(textNode,
        textPoint.getTextOffset());
    if (maybeSecond != null) {
      // The split produced a second text node; the boundary is just before it.
      return Point.inElement(doc.getParentElement(maybeSecond), maybeSecond);
    } else {
      // No split happened (offset was already at the node's end).
      return Point.inElement(doc.getParentElement(textNode), doc.getNextSibling(textNode));
    }
  } else {
    return point.asElementPoint();
  }
}

/**
 * Ensures the given point precedes a node, possibly splitting a text
 * node in order to do so, and possibly traversing until a node is found.
 *
 * @param point point to adjust
 * @param doc document accessor
 * @param textNodeOrganiser used to split text nodes when required
 * @return a node at the same place as the input point, guaranteed to be at
 *         a node boundary. If there is no node, the next available node.
 */
public static <N, T extends N> N ensureNodeBoundaryReturnNextNode(Point<N> point,
    ReadableDocument<N, ?, T> doc, TextNodeOrganiser<T> textNodeOrganiser) {
  Point.Tx<N> textStartPoint = point.asTextPoint();
  if (textStartPoint != null) {
    T textNode = doc.asText(textStartPoint.getContainer());
    N maybeSecond = textNodeOrganiser.splitText(textNode,
        textStartPoint.getTextOffset());
    if (maybeSecond != null) {
      return maybeSecond;
    } else {
      // No split: the point was at the end of the text node, so the answer is
      // the next node in document order.
      return getNextNodeDepthFirst(doc, textNode, null, false);
    }
  } else if (point.getNodeAfter() != null) {
    return point.getNodeAfter();
  } else {
    return getNextNodeDepthFirst(doc, point.getContainer(), null, false);
  }
}

/**
 * Generalisation of {@link WritableLocalDocument#transparentSlice(Object)},
 * allowing a slice at a point, returning a point.
 *
 * Avoids slicing where possible, including where the splitAt point would map
 * to a location in the persistent view corresponding to a point that is also
 * valid in the full view.
 */
public static <N, E extends N, T extends N> Point<N> transparentSlice(Point<N> splitAt,
    DocumentContext<N, E, T> cxt) {
  // Convert to a point in the persistent view
  // TODO(danilatos) More efficiently? This is simple but brutish.
  int location = getFilteredLocation(cxt.locationMapper(), cxt.persistentView(), splitAt);
  Point<N> pPoint = cxt.locationMapper().locate(location);
  if (pPoint.isInTextNode()) {
    T text = cxt.document().asText(pPoint.getContainer());
    E pParent = cxt.document().getParentElement(text);
    if (pParent == cxt.annotatableContent().getParentElement(text)) {
      // Same parent in both views: the text point is already valid everywhere.
      return pPoint;
    } else {
      pPoint = ensureNodeBoundary(pPoint, cxt.document(), cxt.textNodeOrganiser());
    }
  }
  if (pPoint.getNodeAfter() != null) {
    N nodeAfter = pPoint.getNodeAfter();
    if (cxt.annotatableContent().getParentElement(nodeAfter) != pPoint.getContainer()) {
      // nodeAfter lives under a transparent wrapper: slice it out.
      return Point.inElement(pPoint.getContainer(),
          cxt.annotatableContent().transparentSlice(nodeAfter));
    } else {
      return pPoint;
    }
  } else {
    return pPoint;
  }
}
/**
 * Counts how many children a particular element in a document has.
 *
 * <p>Fix: the method declared type parameters {@code <N, E, T>} but then took
 * the concrete raw-implementation types {@code Node}/{@code Element}, leaving
 * the generics unused and restricting it to a single document implementation.
 * The signature now uses the type parameters, which is source-compatible for
 * existing callers (inference yields {@code N=Node, E=Element, T=Text}).
 *
 * @param doc The doc that the element is in.
 * @param elem An element.
 * @return Number of children the specified element has.
 */
public static <N, E extends N, T extends N> int countChildren(
    ReadableDocument<N, E, T> doc, E elem) {
  int children = 0;
  // Count only direct children: walk the sibling chain, no recursion.
  for (N child = doc.getFirstChild(elem); child != null; child = doc.getNextSibling(child)) {
    children++;
  }
  return children;
}
/**
 * Does a linear search from the startNode for an element with the given id
 *
 * @param doc document to search
 * @param subtreeRoot the element to start looking from. Only startNode or its
 *        child elements will be found.
 * @param id id attribute's value
 * @return first matching element, or null if none found
 */
public static <N, E extends N, T extends N> E findElementById(
    ReadableDocument<N, E, T> doc, E subtreeRoot, String id) {
  return findElementByAttr(doc, subtreeRoot, "id", id);
}

/**
 * Does a linear search for an element with the given id
 * @param doc document to search
 * @param id id attribute's value
 * @return first matching element, or null if none found
 */
public static <N, E extends N, T extends N> E findElementById(
    ReadableDocument<N, E, T> doc, String id) {
  return findElementByAttr(doc, "id", id);
}

/**
 * Iterates through startNode and its child elements and returns the first
 * with the matching name value pair amongst its attributes.
 *
 * @throws NullPointerException if name or value is null
 */
public static <N, E extends N, T extends N> E findElementByAttr(
    ReadableDocument<N, E, T> doc, E subtreeRoot, String name, String value) {
  Preconditions.checkNotNull(name, "name must not be null");
  Preconditions.checkNotNull(value, "value must not be null");
  for (E el : DocIterate.deepElements(doc, subtreeRoot, subtreeRoot)) {
    if (value.equals(doc.getAttribute(el, name))) {
      return el;
    }
  }
  return null;
}

/**
 * Iterates through elements in the document and returns the first with the
 * matching name value pair amongst its attributes.
 */
public static <N, E extends N, T extends N> E findElementByAttr(
    ReadableDocument<N, E, T> doc, String name, String value) {
  return findElementByAttr(doc, doc.getDocumentElement(), name, value);
}

/**
 * Does a linear search for an element with the given id and
 * returns its location
 *
 * @param doc document to search
 * @param id id attribute's value
 * @return first matching element's location, or -1 if none found
 */
public static <N, E extends N, T extends N> int findLocationById(
    ReadableWDocument<N, E, T> doc, String id) {
  return findLocationByAttr(doc, "id", id);
}

/**
 * Returns the location of the first matching element
 *
 * @see #findElementByAttr(ReadableDocument, String, String)
 *
 * @return the location of the first matching element or -1 if none found
 */
public static <N, E extends N, T extends N> int findLocationByAttr(
    ReadableWDocument<N, E, T> doc, String name, String value) {
  E el = findElementByAttr(doc, name, value);
  return el != null ? doc.getLocation(el) : -1;
}

/**
 * A predicate that matches the document's root element
 */
public static final DocPredicate ROOT_PREDICATE = new DocPredicate() {
  @Override
  public <N, E extends N, T extends N> boolean apply(ReadableDocument<N, E, T> doc, N node) {
    return node == doc.getDocumentElement();
  }
};

/**
 * @return true if the node is an element with the given tag name
 */
public static <N, E extends N> boolean isMatchingElement(
    final ReadableDocument<N, E, ?> doc, N node, String tagName) {
  E el = doc.asElement(node);
  return el != null && doc.getTagName(el).equals(tagName);
}
/**
 * Maneuvers the given point upwards such that its containing element matches
 * the given predicate. Where this requires an element point, the nodeAfter
 * will be forced rightwards as necessary. If the location is in a text node
 * whose parent matches the predicate, the location already satisfies.
 *
 * Will return the same point by identity where possible.
 *
 * @param doc document accessor
 * @param location point to move upwards
 * @param predicate condition the containing element must satisfy
 * @return the point within an element matching the predicate, or null if
 *         there were none.
 */
@SuppressWarnings("unchecked") // safe
public static <N, E extends N, T extends N> Point<N> jumpOut(
    ReadableDocument<N, E, T> doc, Point<N> location, DocPredicate predicate) {
  E el;
  N nodeAfter;
  if (location.isInTextNode()) {
    el = doc.getParentElement(location.getContainer());
    nodeAfter = doc.getNextSibling(location.getContainer());
    if (predicate.apply(doc, el)) {
      // Parent already matches: the text point is fine as-is.
      return location;
    }
  } else {
    assert doc.asElement(location.getContainer()) != null;
    el = (E) location.getContainer();
    nodeAfter = location.getNodeAfter();
  }
  // Climb until an ancestor matches, carrying the "node after" rightwards.
  while (el != null && !predicate.apply(doc, el)) {
    nodeAfter = doc.getNextSibling(el);
    el = doc.getParentElement(el);
  }
  if (el == null) {
    return null;
  }
  // nodeAfter is of type N, el is of type (E extends N), so inElement(el, nodeAfter)
  // should return a Point<N>. But Sun's java compiler doesn't figure that out,
  // so we need to hint: Point.<N>inElement(...)
  return el == location.getContainer() ? location : Point.<N>inElement(el, nodeAfter);
}
/**
 * Gets the first top-level element in a document.
 *
 * This is a transition method. It has a different contract for old ops vs new
 * ops. After moving to new ops, this method should be deleted and calls to it
 * replaced with the direct version.
 *
 * In old ops, this returns:
 * <code>
 * doc.getDocumentElement();
 * </code>
 * and so is never null.
 *
 * In new ops, this returns:
 * <code>
 * DocHelper.getFirstChildElement(doc, doc.getDocumentElement());
 * </code>
 * and may be null.
 *
 * @param doc document
 * @param tag expected tag name of the top-level element
 * @param expectation whether the element must/must not already exist
 * @return first top-level element in a document. May be null.
 */
private static <N, E extends N> E getOrCreateFirstTopLevelElement(MutableDocument<N, E, ?> doc,
    String tag, Expectation expectation) {
  N firstNode = doc.locate(0).getNodeAfter();
  // Enforce the caller's expectation before doing anything else.
  if (expectation == Expectation.PRESENT && firstNode == null) {
    throw new IllegalArgumentException("Document has no top-level element");
  } else if (expectation == Expectation.ABSENT && firstNode != null) {
    throw new IllegalArgumentException("Document already has top-level node: " + firstNode);
  }
  if (firstNode == null) {
    return doc.createChildElement(doc.getDocumentElement(), tag, Attributes.EMPTY_MAP);
  } else {
    E firstElement = doc.asElement(firstNode);
    if (firstElement == null) {
      throw new IllegalArgumentException("First node is not an element: " + firstNode);
    }
    // Check that this element matches what is expected.
    String actualTag = doc.getTagName(firstElement);
    if (!tag.equals(actualTag)) {
      throw new RuntimeException("Document already has non-matching top-level element: "
          + firstElement);
    } else {
      return firstElement;
    }
  }
}

/**
 * Gets the first top-level element, creating it if it does not exist. If
 * there is an existing top-level element, but it does not match the expected
 * tag, this method fails.
 *
 * In order to avoid race conditions from multiple clients creating multiple
 * top-level elements, please consider using
 * {@link #expectAndGetFirstTopLevelElement(MutableDocument, String)} or
 * {@link #createFirstTopLevelElement(MutableDocument, String)} instead.
 *
 * @param doc document
 * @param tag tag name for the top-level element
 * @return first top-level element, created if necessary. Never null.
 */
public static <E> E getOrCreateFirstTopLevelElement(MutableDocument<? super E, E, ?> doc,
    String tag) {
  return getOrCreateFirstTopLevelElement(doc, tag, Expectation.NONE);
}

/**
 * Gets the first top-level element if it is present.
 *
 * @param doc document
 * @param tag tag name for the top-level element
 * @throws RuntimeException if there is no such element, or it does not match
 *         the specific tag.
 * @return the first top-level element. Never null.
 */
public static <E> E expectAndGetFirstTopLevelElement(MutableDocument<? super E, E, ?> doc,
    String tag) {
  return getOrCreateFirstTopLevelElement(doc, tag, Expectation.PRESENT);
}

/**
 * Creates the first top-level element. If a top-level element already exists,
 * this method fails.
 *
 * @param doc document
 * @param tag tag name for the top-level element
 * @throws RuntimeException if a top-level element already exists.
 * @return the newly created top-level element. Never null.
 */
public static <E> E createFirstTopLevelElement(MutableDocument<? super E, E, ?> doc, String tag) {
  return getOrCreateFirstTopLevelElement(doc, tag, Expectation.ABSENT);
}
/**
 * Finds the nearest common ancestor of two nodes by walking both of their
 * ancestor chains upwards in lock-step, recording every node seen in a single
 * identity map. The first node of either chain that was already recorded by
 * the other chain (or by itself, when one node is an ancestor of the other)
 * is the nearest common ancestor.
 *
 * @return the nearest common ancestor of node1 and node2
 * @throws IllegalArgumentException if the nodes share no ancestor (i.e. they
 *         are not in the same document)
 */
public static <N, E extends N, T extends N> N nearestCommonAncestor(
    ReadableDocument<N, E, T> doc, N node1, N node2) {
  if (node1 == node2) {
    return node1;
  }
  IdentityMap<N, N> seen = CollectionUtils.createIdentityMap();
  N a = node1;
  N b = node2;
  // Alternate one upward step per chain so the walk terminates as soon as
  // the shorter-to-ancestor chain meets the other chain's recorded nodes.
  while (a != null || b != null) {
    if (a != null) {
      if (seen.has(a)) {
        return a;
      }
      seen.put(a, a);
      a = doc.getParentElement(a);
    }
    if (b != null) {
      if (seen.has(b)) {
        return b;
      }
      seen.put(b, b);
      b = doc.getParentElement(b);
    }
  }
  // Both chains hit the root without ever intersecting.
  throw new IllegalArgumentException("nearestCommonAncestor: " +
      "Given nodes are not in the same document");
}
/**
 * Checks whether a given node is an ancestor of another (either inclusive or
 * exclusive), by walking from {@code child} up to the root.
 *
 * @param doc Document used for tree traversal
 * @param ancestor A (non-null) node to test as a possible ancestor
 * @param child The node whose ancestry is being checked
 * @param canEqual The result to return if the two nodes are the same node
 */
public static <N, E extends N, T extends N>
    boolean isAncestor(ReadableDocument<N, E, T> doc, N ancestor, N child, boolean canEqual) {
  Preconditions.checkNotNull(ancestor, "Shouldn't check ancestry of a null node");
  // Cost is O(depth of child). After the first step upwards, identity with
  // `ancestor` means strict (proper) ancestry, so the flag flips to true.
  boolean matchResult = canEqual;
  for (N current = child; current != null; current = doc.getParentElement(current)) {
    if (current == ancestor) {
      return matchResult;
    }
    matchResult = true;
  }
  return false; // reached the root without finding `ancestor`
}
}
|
apache/hadoop-mapreduce | 37,680 | src/test/mapred/org/apache/hadoop/mapred/TestTaskTrackerLocalization.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.mapred;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.LinkedHashMap;
import java.util.TreeMap;
import java.util.jar.JarOutputStream;
import java.util.zip.ZipEntry;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.LocalDirAllocator;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.MRConfig;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.MRJobConfig;
import org.apache.hadoop.mapreduce.TaskType;
import org.apache.hadoop.mapreduce.security.TokenCache;
import org.apache.hadoop.mapreduce.server.tasktracker.Localizer;
import org.apache.hadoop.mapreduce.util.MRAsyncDiskService;
import static org.apache.hadoop.mapred.QueueManager.toFullPropertyName;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.Shell;
import org.apache.hadoop.mapred.JvmManager.JvmEnv;
import org.apache.hadoop.mapred.TaskController.JobInitializationContext;
import org.apache.hadoop.mapred.TaskController.TaskControllerContext;
import org.apache.hadoop.mapred.TaskTracker.RunningJob;
import org.apache.hadoop.mapred.TaskTracker.TaskInProgress;
import org.apache.hadoop.mapred.UtilsForTests.InlineCleanupQueue;
import junit.framework.TestCase;
/**
* Test to verify localization of a job and localization of a task on a
* TaskTracker.
*
*/
public class TestTaskTrackerLocalization extends TestCase {
// Root of all test data; re-pointed to a per-test-class subdir in setUp().
private static File TEST_ROOT_DIR =
    new File(System.getProperty("test.build.data", "/tmp"));
// Parent of the simulated mapred local dirs (created in setUp()).
private File ROOT_MAPRED_LOCAL_DIR;
// Directory wired into the "hadoop.log.dir" system property.
private File HADOOP_LOG_DIR;
// Location of the shell helper script used to read file permissions/owners.
private static File PERMISSION_SCRIPT_DIR;
private static File PERMISSION_SCRIPT_FILE;
// Prints "permissions:owner:group" for the path given as $1.
private static final String PERMISSION_SCRIPT_CONTENT = "ls -l -d $1 | " +
    "awk '{print $1\":\"$3\":\"$4}'";
// Number of simulated mapred local dirs to configure on the TaskTracker.
private int numLocalDirs = 6;
private static final Log LOG =
    LogFactory.getLog(TestTaskTrackerLocalization.class);
// Fixture objects built in setUp(); protected so subclasses can reuse them.
protected TaskTracker tracker;
protected UserGroupInformation taskTrackerUGI;
protected TaskController taskController;
protected JobConf trackerFConf;
// Job conf as rewritten by job localization (set by the individual tests).
private JobConf localizedJobConf;
protected JobID jobId;
protected TaskAttemptID taskId;
protected Task task;
protected String[] localDirs;
protected static LocalDirAllocator lDirAlloc =
    new LocalDirAllocator(MRConfig.LOCAL_DIR);
// Task-attempt work dir and log files, populated by initializeTask().
protected Path attemptWorkDir;
protected File[] attemptLogFiles;
protected JobConf localizedTaskConf;
private TaskInProgress tip;
private JobConf jobConf;
private File jobConfFile;
/**
 * Dummy method in this base class. Only derived classes will define this
 * method for checking if a test can be run (e.g. platform or privilege
 * requirements for other task controllers).
 *
 * @return true in this base class; every test short-circuits when false.
 */
protected boolean canRun() {
  return true;
}
@Override
protected void setUp()
    throws Exception {
  // Skip all setup if the concrete test class says it can't run here.
  if (!canRun()) {
    return;
  }
  // Use a per-test-class root so parallel/derived tests don't collide.
  TEST_ROOT_DIR =
      new File(System.getProperty("test.build.data", "/tmp"), getClass()
          .getSimpleName());
  if (!TEST_ROOT_DIR.exists()) {
    TEST_ROOT_DIR.mkdirs();
  }
  ROOT_MAPRED_LOCAL_DIR = new File(TEST_ROOT_DIR, "mapred/local");
  ROOT_MAPRED_LOCAL_DIR.mkdirs();
  HADOOP_LOG_DIR = new File(TEST_ROOT_DIR, "logs");
  HADOOP_LOG_DIR.mkdir();
  System.setProperty("hadoop.log.dir", HADOOP_LOG_DIR.getAbsolutePath());
  // TaskTracker conf: local FS only, several simulated local dirs, ACLs on.
  trackerFConf = new JobConf();
  trackerFConf.set(FileSystem.FS_DEFAULT_NAME_KEY, "file:///");
  localDirs = new String[numLocalDirs];
  for (int i = 0; i < numLocalDirs; i++) {
    localDirs[i] = new File(ROOT_MAPRED_LOCAL_DIR, "0_" + i).getPath();
  }
  trackerFConf.setStrings(MRConfig.LOCAL_DIR, localDirs);
  trackerFConf.setBoolean(MRConfig.MR_ACLS_ENABLED, true);
  // Create the job configuration file. Same as trackerFConf in this test.
  jobConf = new JobConf(trackerFConf);
  // Set job view ACLs in conf so that validation of contents of jobACLsFile
  // can be done against this value. Have both users and groups.
  String jobViewACLs = "user1,user2, group1,group2";
  jobConf.set(MRJobConfig.JOB_ACL_VIEW_JOB, jobViewACLs);
  jobConf.setInt(MRJobConfig.USER_LOG_RETAIN_HOURS, 0);
  jobConf.setUser(getJobOwner().getShortUserName());
  String queue = "default";
  // set job queue name in job conf
  jobConf.setQueueName(queue);
  // Set queue admins acl in job conf similar to what JobClient does so that
  // it goes into job conf also.
  jobConf.set(toFullPropertyName(queue,
      QueueACL.ADMINISTER_JOBS.getAclName()),
      "qAdmin1,qAdmin2 qAdminsGroup1,qAdminsGroup2");
  Job job = Job.getInstance(jobConf);
  String jtIdentifier = "200907202331";
  jobId = new JobID(jtIdentifier, 1);
  // JobClient uploads the job jar to the file system and sets it in the
  // jobConf.
  uploadJobJar(job);
  // JobClient uploads the jobConf to the file system.
  jobConfFile = uploadJobConf(job.getConfiguration());
  // create jobTokens file
  uploadJobTokensFile();
  taskTrackerUGI = UserGroupInformation.getCurrentUser();
  startTracker();
  // Set up the task to be localized
  taskId =
      new TaskAttemptID(jtIdentifier, jobId.getId(), TaskType.MAP, 1, 0);
  createTask();
  // mimic register task
  // create the tip
  tip = tracker.new TaskInProgress(task, trackerFConf);
}
/**
 * Creates and configures a fresh TaskTracker and wires up its collaborators.
 * Also used by testTrackerRestart() to imitate a tracker restart.
 * Note: the {@code tracker} field must be assigned before
 * {@link #initializeTracker()} runs, since that method mutates it.
 */
private void startTracker() throws IOException {
  // Set up the TaskTracker
  tracker = new TaskTracker();
  tracker.setConf(trackerFConf);
  tracker.setTaskLogCleanupThread(new UserLogCleaner(trackerFConf));
  initializeTracker();
}
/**
 * Wires the tracker's internals the way TaskTracker initialization would:
 * filesystems, bookkeeping maps, disk service, instrumentation, and finally
 * the task controller plus the Localizer built on top of it.
 */
private void initializeTracker() throws IOException {
  tracker.setIndexCache(new IndexCache(trackerFConf));
  tracker.setTaskMemoryManagerEnabledFlag();
  // for test case system FS is the local FS
  tracker.systemFS = FileSystem.getLocal(trackerFConf);
  tracker.setLocalFileSystem(tracker.systemFS);
  tracker.systemDirectory = new Path(TEST_ROOT_DIR.getAbsolutePath());
  tracker.runningTasks = new LinkedHashMap<TaskAttemptID, TaskInProgress>();
  tracker.runningJobs = new TreeMap<JobID, RunningJob>();
  // Wipe any leftover state from a previous (simulated) tracker instance.
  tracker.setAsyncDiskService(new MRAsyncDiskService(trackerFConf));
  tracker.getAsyncDiskService().cleanupAllVolumes();
  // Set up TaskTracker instrumentation
  tracker.setTaskTrackerInstrumentation(
      TaskTracker.createInstrumentation(tracker, trackerFConf));
  // setup task controller; createTaskController() is the subclass hook.
  taskController = createTaskController();
  taskController.setConf(trackerFConf);
  taskController.setup();
  tracker.setTaskController(taskController);
  tracker.setLocalizer(new Localizer(tracker.getLocalFileSystem(), localDirs,
      taskController));
}
/**
 * Creates the TaskController used for these tests. Presumably an override
 * point for derived test classes exercising other controllers — confirm
 * against subclasses before relying on this.
 *
 * @return a new DefaultTaskController
 */
protected TaskController createTaskController() {
  return new DefaultTaskController();
}
/**
 * Builds the MapTask to be localized, pointing it at the uploaded job conf
 * file and stamping it with the job's user.
 */
private void createTask()
    throws IOException {
  task = new MapTask(jobConfFile.toURI().toString(), taskId, 1, null, 1);
  task.setConf(jobConf); // Set conf. Set user name in particular.
  task.setUser(jobConf.getUser());
}
/**
 * The user the test job runs as: the current user here; derived classes may
 * override to test cross-user localization.
 */
protected UserGroupInformation getJobOwner() throws IOException {
  return UserGroupInformation.getCurrentUser();
}
/**
 * Static block setting up the permission script which would be used by
 * checkFilePermissions. The script prints "permissions:owner:group" for the
 * path passed as its first argument.
 */
static {
  PERMISSION_SCRIPT_DIR = new File(TEST_ROOT_DIR, "permission_script_dir");
  PERMISSION_SCRIPT_FILE = new File(PERMISSION_SCRIPT_DIR, "getperms.sh");
  if(PERMISSION_SCRIPT_FILE.exists()) {
    PERMISSION_SCRIPT_FILE.delete();
  }
  if(PERMISSION_SCRIPT_DIR.exists()) {
    PERMISSION_SCRIPT_DIR.delete();
  }
  PERMISSION_SCRIPT_DIR.mkdir();
  try {
    PrintWriter writer = new PrintWriter(PERMISSION_SCRIPT_FILE);
    // try/finally so the stream is closed even if write() fails; the old
    // code leaked the writer on any failure between open and close.
    try {
      writer.write(PERMISSION_SCRIPT_CONTENT);
    } finally {
      writer.close();
    }
  } catch (FileNotFoundException fe) {
    fail();
  }
  PERMISSION_SCRIPT_FILE.setExecutable(true, true);
}
/**
 * Creates a dummy job jar containing two empty lib jars and sets it on the
 * job via a file:// URI, mimicking JobClient's upload of the job jar.
 *
 * @param job job whose jar location is set
 * @throws IOException if writing the jar fails
 * @throws FileNotFoundException if the jar file cannot be created
 */
private void uploadJobJar(Job job)
    throws IOException,
    FileNotFoundException {
  File jobJarFile = new File(TEST_ROOT_DIR, "jobjar-on-dfs.jar");
  JarOutputStream jstream =
      new JarOutputStream(new FileOutputStream(jobJarFile));
  // Close in finally: the old code leaked the stream (and the underlying
  // file handle) if any putNextEntry/closeEntry call threw.
  try {
    ZipEntry ze = new ZipEntry("lib/lib1.jar");
    jstream.putNextEntry(ze);
    jstream.closeEntry();
    ze = new ZipEntry("lib/lib2.jar");
    jstream.putNextEntry(ze);
    jstream.closeEntry();
    jstream.finish();
  } finally {
    jstream.close();
  }
  job.setJar(jobJarFile.toURI().toString());
}
/**
 * Writes the given configuration to an XML file under TEST_ROOT_DIR,
 * mimicking JobClient's upload of job.xml.
 *
 * @param conf configuration to serialize
 * @return the file the configuration was written to
 * @throws FileNotFoundException if the file cannot be created
 * @throws IOException if serialization fails
 */
protected File uploadJobConf(Configuration conf)
    throws FileNotFoundException,
    IOException {
  File jobConfFile = new File(TEST_ROOT_DIR, "jobconf-on-dfs.xml");
  FileOutputStream out = new FileOutputStream(jobConfFile);
  // Close in finally: the old code leaked the stream if writeXml threw.
  try {
    conf.writeXml(out);
  } finally {
    out.close();
  }
  return jobConfFile;
}
/**
 * Creates a fake (empty) jobTokens file under TEST_ROOT_DIR/&lt;jobid&gt;.
 * No keys are written since this test does not need them.
 *
 * @throws IOException if the directory or token file cannot be created
 */
protected void uploadJobTokensFile() throws IOException {
  File dir = new File(TEST_ROOT_DIR, jobId.toString());
  if (!dir.exists()) {
    // "faild" typo in the assertion message fixed to "failed".
    assertTrue("failed to create dir=" + dir.getAbsolutePath(), dir.mkdirs());
  }
  // writing empty file, we don't need the keys for this test
  new Credentials().writeTokenStorageFile(new Path("file:///" + dir,
      TokenCache.JOB_TOKEN_HDFS_FILE), new Configuration());
}
@Override
protected void tearDown()
    throws Exception {
  // Nothing was set up if the test was skipped; mirror setUp()'s guard.
  if (!canRun()) {
    return;
  }
  // Remove the whole per-test-class tree created by setUp().
  FileUtil.fullyDelete(TEST_ROOT_DIR);
}
/**
 * Runs the helper permission script against {@code path} and splits its
 * "permissions:owner:group" output into its three components.
 */
protected static String[] getFilePermissionAttrs(String path)
    throws IOException {
  String scriptPath = PERMISSION_SCRIPT_FILE.getAbsolutePath();
  String output = Shell.execCommand("bash", scriptPath, path);
  // Split on the colon separators; a trailing newline is stripped too.
  return output.split(":|\n");
}
/**
 * Utility method to check permission of a given path. Requires the permission
 * script directory to be setup (see the static initializer) in order to call.
 *
 * @param path path whose attributes are checked
 * @param expectedPermissions expected "drwx..."-style permission string
 * @param expectedOwnerUser expected owning user
 * @param expectedOwnerGroup expected owning group
 * @throws IOException if the permission script cannot be run
 */
static void checkFilePermissions(String path, String expectedPermissions,
    String expectedOwnerUser, String expectedOwnerGroup)
    throws IOException {
  String[] attrs = getFilePermissionAttrs(path);
  // assertEquals instead of assertTrue(x.equals(y)): same checks, but the
  // failure output now includes both expected and actual values.
  assertEquals("File attrs length is not 3 but " + attrs.length,
      3, attrs.length);
  assertEquals("Path " + path + " has unexpected permissions",
      expectedPermissions, attrs[0]);
  assertEquals("Path " + path + " has unexpected owning user",
      expectedOwnerUser, attrs[1]);
  assertEquals("Path " + path + " has unexpected owning group",
      expectedOwnerGroup, attrs[2]);
}
/**
 * Verify the task-controller's setup functionality: every configured mapred
 * local dir and the userlogs dir must exist with 755 permissions and the
 * expected owner/group.
 *
 * @throws IOException
 */
public void testTaskControllerSetup()
    throws IOException {
  if (!canRun()) {
    return;
  }
  // Task-controller is already set up in the test's setup method. Now verify.
  for (String localDir : localDirs) {
    // Verify the local-dir itself.
    File lDir = new File(localDir);
    assertTrue("localDir " + lDir + " doesn't exists!", lDir.exists());
    checkFilePermissions(lDir.getAbsolutePath(), "drwxr-xr-x", task
        .getUser(), taskTrackerUGI.getGroupNames()[0]);
  }
  // Verify the permissions on the userlogs dir
  File taskLog = TaskLog.getUserLogDir();
  checkFilePermissions(taskLog.getAbsolutePath(), "drwxr-xr-x", task
      .getUser(), taskTrackerUGI.getGroupNames()[0]);
}
/**
 * Test the localization of a user on the TT: user dirs are created on first
 * call, and initializeUserDirs() is not re-entrant — once localized, deleted
 * user dirs are not recreated by a second call.
 *
 * @throws IOException
 */
public void testUserLocalization()
    throws IOException {
  if (!canRun()) {
    return;
  }
  // /////////// The main method being tested
  tracker.getLocalizer().initializeUserDirs(task.getUser());
  // ///////////
  // Check the directory structure and permissions
  checkUserLocalization();
  // For the sake of testing re-entrancy of initializeUserDirs(), we remove
  // the user directories now and make sure that further calls of the method
  // don't create directories any more.
  for (String dir : localDirs) {
    File userDir = new File(dir, TaskTracker.getUserDir(task.getUser()));
    if (!FileUtil.fullyDelete(userDir)) {
      // "Uanble" typo in the error message fixed to "Unable".
      throw new IOException("Unable to delete " + userDir);
    }
  }
  // Now call the method again.
  tracker.getLocalizer().initializeUserDirs(task.getUser());
  // Files should not be created now and so shouldn't be there anymore.
  for (String dir : localDirs) {
    File userDir = new File(dir, TaskTracker.getUserDir(task.getUser()));
    assertFalse("Unexpectedly, user-dir " + userDir.getAbsolutePath()
        + " exists!", userDir.exists());
  }
}
/**
 * Verifies the per-user directory structure under every configured local dir:
 * taskTracker/&lt;user&gt; (700), its jobcache (700), and the user's private
 * distributed-cache dir (700). Assertion-message typos and missing spaces in
 * the concatenated messages are fixed.
 */
protected void checkUserLocalization()
    throws IOException {
  for (String dir : localDirs) {
    File localDir = new File(dir);
    // Message fixed: was " isn'task created!".
    assertTrue(MRConfig.LOCAL_DIR + localDir + " isn't created!",
        localDir.exists());
    File taskTrackerSubDir = new File(localDir, TaskTracker.SUBDIR);
    // Message fixed: missing space before "is not created!".
    assertTrue("taskTracker sub-dir in the local-dir " + localDir
        + " is not created!", taskTrackerSubDir.exists());
    File userDir = new File(taskTrackerSubDir, task.getUser());
    // Message fixed: missing space before "is not created!".
    assertTrue("user-dir in taskTrackerSubdir " + taskTrackerSubDir
        + " is not created!", userDir.exists());
    checkFilePermissions(userDir.getAbsolutePath(), "drwx------", task
        .getUser(), taskTrackerUGI.getGroupNames()[0]);
    File jobCache = new File(userDir, TaskTracker.JOBCACHE);
    assertTrue("jobcache in the userDir " + userDir + " isn't created!",
        jobCache.exists());
    checkFilePermissions(jobCache.getAbsolutePath(), "drwx------", task
        .getUser(), taskTrackerUGI.getGroupNames()[0]);
    // Verify the distributed cache dir.
    File distributedCacheDir =
        new File(localDir, TaskTracker
            .getPrivateDistributedCacheDir(task.getUser()));
    assertTrue("distributed cache dir " + distributedCacheDir
        + " doesn't exists!", distributedCacheDir.exists());
    checkFilePermissions(distributedCacheDir.getAbsolutePath(),
        "drwx------", task.getUser(), taskTrackerUGI.getGroupNames()[0]);
  }
}
/**
 * Test job localization on a TT. Tests localization of job.xml, job.jar and
 * corresponding setting of configuration. Also test
 * {@link TaskController#initializeJob(JobInitializationContext)}
 *
 * @throws IOException
 */
public void testJobLocalization()
    throws Exception {
  if (!canRun()) {
    return;
  }
  // Localize the job; the returned RunningJob carries the localized conf.
  TaskTracker.RunningJob rjob = tracker.localizeJob(tip);
  localizedJobConf = rjob.getJobConf();
  // All structural/permission assertions live in checkJobLocalization().
  checkJobLocalization();
}
/**
 * Test that, if the job log dir can't be created, the job will fail
 * during localization rather than at the time when the task itself
 * tries to write into it.
 */
public void testJobLocalizationFailsIfLogDirUnwritable()
    throws Exception {
  if (!canRun()) {
    return;
  }
  File logDir = TaskLog.getJobDir(jobId);
  File logDirParent = logDir.getParentFile();
  try {
    assertTrue(logDirParent.mkdirs() || logDirParent.isDirectory());
    FileUtil.fullyDelete(logDir);
    // Make the parent unwritable so job-log-dir creation must fail.
    FileUtil.chmod(logDirParent.getAbsolutePath(), "000");
    tracker.localizeJob(tip);
    fail("No exception");
  } catch (IOException ioe) {
    // Expected: localization itself reports the failure.
    LOG.info("Got exception", ioe);
    assertTrue(ioe.getMessage().contains("Could not create job user log"));
  } finally {
    // Put it back just to be safe — otherwise later tests/teardown would
    // also fail to write under this parent.
    FileUtil.chmod(logDirParent.getAbsolutePath(), "755");
  }
}
/**
 * Verifies everything job localization should have produced: the per-job
 * dirs under each local dir (700), localized job.xml and job.jar (with
 * unjarred libs), the job work dir, the rewritten conf values
 * (mapreduce.job.local.dir, job jar path), and the job userlog dir plus its
 * job-acls.xml file. Uses assertNotNull in place of assertTrue(x != null).
 */
protected void checkJobLocalization()
    throws IOException {
  // Check the directory structure
  for (String dir : localDirs) {
    File localDir = new File(dir);
    File taskTrackerSubDir = new File(localDir, TaskTracker.SUBDIR);
    File userDir = new File(taskTrackerSubDir, task.getUser());
    File jobCache = new File(userDir, TaskTracker.JOBCACHE);
    File jobDir = new File(jobCache, jobId.toString());
    assertTrue("job-dir in " + jobCache + " isn't created!", jobDir.exists());
    // check the private permissions on the job directory
    checkFilePermissions(jobDir.getAbsolutePath(), "drwx------", task
        .getUser(), taskTrackerUGI.getGroupNames()[0]);
  }
  // check the localization of job.xml
  assertNotNull("job.xml is not localized on this TaskTracker!!", lDirAlloc
      .getLocalPathToRead(TaskTracker.getLocalJobConfFile(task.getUser(),
          jobId.toString()), trackerFConf));
  // check the localization of job.jar
  Path jarFileLocalized =
      lDirAlloc.getLocalPathToRead(TaskTracker.getJobJarFile(task.getUser(),
          jobId.toString()), trackerFConf);
  assertNotNull("job.jar is not localized on this TaskTracker!!",
      jarFileLocalized);
  assertTrue("lib/lib1.jar is not unjarred on this TaskTracker!!", new File(
      jarFileLocalized.getParent() + Path.SEPARATOR + "lib/lib1.jar")
      .exists());
  assertTrue("lib/lib2.jar is not unjarred on this TaskTracker!!", new File(
      jarFileLocalized.getParent() + Path.SEPARATOR + "lib/lib2.jar")
      .exists());
  // check the creation of job work directory
  assertNotNull("job-work dir is not created on this TaskTracker!!", lDirAlloc
      .getLocalPathToRead(TaskTracker.getJobWorkDir(task.getUser(), jobId
          .toString()), trackerFConf));
  // Check the setting of mapreduce.job.local.dir and job.jar which will
  // eventually be used by the user's task
  boolean jobLocalDirFlag = false, mapredJarFlag = false;
  String localizedJobLocalDir =
      localizedJobConf.get(TaskTracker.JOB_LOCAL_DIR);
  String localizedJobJar = localizedJobConf.getJar();
  for (String localDir : localizedJobConf.getStrings(MRConfig.LOCAL_DIR)) {
    if (localizedJobLocalDir.equals(localDir + Path.SEPARATOR
        + TaskTracker.getJobWorkDir(task.getUser(), jobId.toString()))) {
      jobLocalDirFlag = true;
    }
    if (localizedJobJar.equals(localDir + Path.SEPARATOR
        + TaskTracker.getJobJarFile(task.getUser(), jobId.toString()))) {
      mapredJarFlag = true;
    }
  }
  assertTrue(TaskTracker.JOB_LOCAL_DIR
      + " is not set properly to the target users directory : "
      + localizedJobLocalDir, jobLocalDirFlag);
  assertTrue(
      "mapreduce.job.jar is not set properly to the target users directory : "
          + localizedJobJar, mapredJarFlag);
  // check job user-log directory permissions
  File jobLogDir = TaskLog.getJobDir(jobId);
  assertTrue("job log directory " + jobLogDir + " does not exist!", jobLogDir
      .exists());
  checkFilePermissions(jobLogDir.toString(), "drwx------", task.getUser(),
      taskTrackerUGI.getGroupNames()[0]);
  // Make sure that the job ACLs file job-acls.xml exists in job userlog dir
  File jobACLsFile = new File(jobLogDir, TaskTracker.jobACLsFile);
  assertTrue("JobACLsFile is missing in the job userlog dir " + jobLogDir,
      jobACLsFile.exists());
  // With default task controller, the job-acls.xml file is owned by TT and
  // permissions are 700
  checkFilePermissions(jobACLsFile.getAbsolutePath(), "-rw-------",
      taskTrackerUGI.getShortUserName(), taskTrackerUGI.getGroupNames()[0]);
  validateJobACLsFileContent();
}
// Validate the contents of jobACLsFile (i.e. user name, job-view-acl, queue
// name and queue-admins-acl) against the localized job conf.
// Uses assertEquals instead of assertTrue(a.equals(b)): the failure message
// now shows both values, and a null value fails the assertion instead of
// throwing a NullPointerException.
protected void validateJobACLsFileContent() {
  JobConf jobACLsConf = TaskLogServlet.getConfFromJobACLsFile(jobId);
  assertEquals(localizedJobConf.getUser(), jobACLsConf.get("user.name"));
  assertEquals(localizedJobConf.get(MRJobConfig.JOB_ACL_VIEW_JOB),
      jobACLsConf.get(MRJobConfig.JOB_ACL_VIEW_JOB));
  String queue = localizedJobConf.getQueueName();
  // Queue names are compared case-insensitively, as in the original.
  assertTrue(queue.equalsIgnoreCase(jobACLsConf.getQueueName()));
  String qACLName = toFullPropertyName(queue,
      QueueACL.ADMINISTER_JOBS.getAclName());
  assertEquals(localizedJobConf.get(qACLName), jobACLsConf.get(qACLName));
}
/**
 * Test task localization on a TT: localize the job, initialize the task
 * attempt, then verify the attempt's dirs, conf and logs.
 *
 * @throws IOException
 */
public void testTaskLocalization()
    throws Exception {
  if (!canRun()) {
    return;
  }
  // Job must be localized before the task attempt can be.
  TaskTracker.RunningJob rjob = tracker.localizeJob(tip);
  localizedJobConf = rjob.getJobConf();
  initializeTask();
  checkTaskLocalization();
}
/**
 * Drives task-attempt localization end to end: localizes the task via the
 * TIP, verifies the attempt dirs appear under every local dir, sets up the
 * child task configuration/tmp dir/log files via the TaskRunner, loads the
 * localized task conf, and finally initializes the task via TaskController.
 * Populates attemptWorkDir, attemptLogFiles and localizedTaskConf.
 * Requires localizedJobConf to be set by a prior localizeJob() call.
 */
private void initializeTask() throws IOException {
  tip.setJobConf(localizedJobConf);
  // ////////// The central method being tested
  tip.localizeTask(task);
  // //////////
  // check the functionality of localizeTask
  for (String dir : trackerFConf.getStrings(MRConfig.LOCAL_DIR)) {
    File attemptDir =
        new File(dir, TaskTracker.getLocalTaskDir(task.getUser(), jobId
            .toString(), taskId.toString(), task.isTaskCleanupTask()));
    assertTrue("attempt-dir " + attemptDir + " in localDir " + dir
        + " is not created!!", attemptDir.exists());
  }
  attemptWorkDir =
      lDirAlloc.getLocalPathToRead(TaskTracker.getTaskWorkDir(
          task.getUser(), task.getJobID().toString(), task.getTaskID()
              .toString(), task.isTaskCleanupTask()), trackerFConf);
  assertTrue("atttempt work dir for " + taskId.toString()
      + " is not created in any of the configured dirs!!",
      attemptWorkDir != null);
  TaskRunner runner = task.createRunner(tracker, tip);
  tip.setTaskRunner(runner);
  // /////// Few more methods being tested
  runner.setupChildTaskConfiguration(lDirAlloc);
  TaskRunner.createChildTmpDir(new File(attemptWorkDir.toUri().getPath()),
      localizedJobConf);
  attemptLogFiles = runner.prepareLogFiles(task.getTaskID(),
      task.isTaskCleanupTask());
  // Make sure the task-conf file is created
  Path localTaskFile =
      lDirAlloc.getLocalPathToRead(TaskTracker.getTaskConfFile(task
          .getUser(), task.getJobID().toString(), task.getTaskID()
          .toString(), task.isTaskCleanupTask()), trackerFConf);
  assertTrue("Task conf file " + localTaskFile.toString()
      + " is not created!!", new File(localTaskFile.toUri().getPath())
      .exists());
  // /////// One more method being tested. This happens in child space.
  localizedTaskConf = new JobConf(localTaskFile);
  TaskRunner.setupChildMapredLocalDirs(task, localizedTaskConf);
  // ///////
  // Initialize task via TaskController
  TaskControllerContext taskContext =
      new TaskController.TaskControllerContext();
  taskContext.env =
      new JvmEnv(null, null, null, null, -1, new File(localizedJobConf
          .get(TaskTracker.JOB_LOCAL_DIR)), null, localizedJobConf);
  taskContext.task = task;
  // /////////// The method being tested
  taskController.initializeTask(taskContext);
  // ///////////
}
/**
 * Verifies task-attempt localization results: sandboxed per-attempt local
 * dirs, the rewritten job-file path, the tmp dir under the attempt work dir,
 * and the attempt's log dir (700) with the expected stdout/stderr files.
 * Stdout/stderr path comparisons use assertEquals for clearer failures.
 */
protected void checkTaskLocalization()
    throws IOException {
  // Make sure that the mapreduce.cluster.local.dir is sandboxed
  for (String childMapredLocalDir : localizedTaskConf
      .getStrings(MRConfig.LOCAL_DIR)) {
    assertTrue("Local dir " + childMapredLocalDir + " is not sandboxed !!",
        childMapredLocalDir.endsWith(TaskTracker.getLocalTaskDir(task
            .getUser(), jobId.toString(), taskId.toString(),
            task.isTaskCleanupTask())));
  }
  // Make sure task task.getJobFile is changed and pointed correctly.
  assertTrue(task.getJobFile().endsWith(
      TaskTracker.getTaskConfFile(task.getUser(), jobId.toString(), taskId
          .toString(), task.isTaskCleanupTask())));
  // Make sure that the tmp directories are created
  assertTrue("tmp dir is not created in workDir "
      + attemptWorkDir.toUri().getPath(), new File(attemptWorkDir.toUri()
      .getPath(), "tmp").exists());
  // Make sure that the logs are setup properly
  File logDir = TaskLog.getAttemptDir(taskId, task.isTaskCleanupTask());
  assertTrue("task's log dir " + logDir.toString() + " doesn't exist!",
      logDir.exists());
  checkFilePermissions(logDir.getAbsolutePath(), "drwx------", task
      .getUser(), taskTrackerUGI.getGroupNames()[0]);
  File expectedStdout = new File(logDir, TaskLog.LogName.STDOUT.toString());
  assertEquals("stdout log file is improper.",
      expectedStdout.toString(), attemptLogFiles[0].toString());
  File expectedStderr =
      new File(logDir, Path.SEPARATOR + TaskLog.LogName.STDERR.toString());
  assertEquals("stderr log file is improper.",
      expectedStderr.toString(), attemptLogFiles[1].toString());
}
/**
 * Create a file in the given dir and set permissions r_xr_xr_x so that no one
 * can delete it directly (without doing chmod).
 * Creates dir/subDir and dir/subDir/file.
 *
 * @param jobConf conf used to obtain the local FileSystem
 * @param dir parent under which subDir/file is created
 * @throws IOException if the dir or file cannot be created
 */
static void createFileAndSetPermissions(JobConf jobConf, Path dir)
    throws IOException {
  Path subDir = new Path(dir, "subDir");
  FileSystem fs = FileSystem.getLocal(jobConf);
  fs.mkdirs(subDir);
  Path p = new Path(subDir, "file");
  java.io.DataOutputStream out = fs.create(p);
  // Close in finally: the old code leaked the stream if writeBytes threw.
  try {
    out.writeBytes("dummy input");
  } finally {
    out.close();
  }
  // no write permission for subDir and subDir/file
  try {
    int ret = 0;
    if((ret = FileUtil.chmod(subDir.toUri().getPath(), "a=rx", true)) != 0) {
      LOG.warn("chmod failed for " + subDir + ";retVal=" + ret);
    }
  } catch(InterruptedException e) {
    LOG.warn("Interrupted while doing chmod for " + subDir);
  }
}
/**
 * Validates the removal of $taskid and $taskid/work under mapred-local-dir
 * in cases where those directories cannot be deleted without adding
 * write permission to the newly created directories under $taskid and
 * $taskid/work.
 * Also see createFileAndSetPermissions for details.
 */
void validateRemoveTaskFiles(boolean needCleanup, boolean jvmReuse,
    TaskInProgress tip) throws IOException {
  // create files and set permissions 555. Verify if task controller sets
  // the permissions for TT to delete the taskDir or workDir
  String dir = (!needCleanup || jvmReuse) ?
      TaskTracker.getTaskWorkDir(task.getUser(), task.getJobID().toString(),
        taskId.toString(), task.isTaskCleanupTask())
    : TaskTracker.getLocalTaskDir(task.getUser(), task.getJobID().toString(),
        taskId.toString(), task.isTaskCleanupTask());
  Path[] paths = tracker.getLocalFiles(localizedJobConf, dir);
  assertTrue("No paths found", paths.length > 0);
  for (Path p : paths) {
    if (tracker.getLocalFileSystem().exists(p)) {
      createFileAndSetPermissions(localizedJobConf, p);
    }
  }
  // Inline queue makes the (normally asynchronous) deletions synchronous
  // so the assertions below see the final state.
  InlineCleanupQueue cleanupQueue = new InlineCleanupQueue();
  tracker.setCleanupThread(cleanupQueue);
  tip.removeTaskFiles(needCleanup, taskId);
  if (jvmReuse) {
    // work dir should still exist and cleanup queue should be empty
    assertTrue("cleanup queue is not empty after removeTaskFiles() in case "
        + "of jvm reuse.", cleanupQueue.isQueueEmpty());
    boolean workDirExists = false;
    for (Path p : paths) {
      if (tracker.getLocalFileSystem().exists(p)) {
        workDirExists = true;
      }
    }
    assertTrue("work dir does not exist in case of jvm reuse", workDirExists);
    // now try to delete the work dir and verify that there are no stale paths
    JvmManager.deleteWorkDir(tracker, task);
  }
  assertTrue("Some task files are not deleted!! Number of stale paths is "
      + cleanupQueue.stalePaths.size(), cleanupQueue.stalePaths.size() == 0);
}
/**
 * Validates if task cleanup is done properly for a succeeded task.
 *
 * @throws Exception
 */
public void testTaskFilesRemoval()
    throws Exception {
  if (!canRun()) {
    return;
  }
  testTaskFilesRemoval(false, false);// no needCleanup; no jvmReuse
}
/**
 * Validates if task cleanup is done properly for a task that is not
 * succeeded, including the cleanup attempt that follows it.
 *
 * @throws Exception
 */
public void testFailedTaskFilesRemoval()
    throws Exception {
  if (!canRun()) {
    return;
  }
  testTaskFilesRemoval(true, false);// needCleanup; no jvmReuse
  // initialize a cleanupAttempt for the task.
  task.setTaskCleanupTask();
  // localize task cleanup attempt
  initializeTask();
  checkTaskLocalization();
  // verify the cleanup of cleanup attempt.
  testTaskFilesRemoval(true, false);// needCleanup; no jvmReuse
}
/**
 * Validates if task cleanup is done properly for a succeeded task when JVM
 * reuse is enabled (the work dir must survive until the JVM is done).
 *
 * @throws Exception
 */
public void testTaskFilesRemovalWithJvmUse()
    throws Exception {
  if (!canRun()) {
    return;
  }
  testTaskFilesRemoval(false, true);// no needCleanup; jvmReuse
}
/**
 * Validates if task cleanup is done properly: localizes job and task, then
 * delegates the actual checks to validateRemoveTaskFiles().
 *
 * @param needCleanup whether the task is treated as needing cleanup
 * @param jvmReuse whether to configure multiple tasks per JVM
 */
private void testTaskFilesRemoval(boolean needCleanup, boolean jvmReuse)
    throws Exception {
  // Localize job and localize task.
  TaskTracker.RunningJob rjob = tracker.localizeJob(tip);
  localizedJobConf = rjob.getJobConf();
  if (jvmReuse) {
    localizedJobConf.setNumTasksToExecutePerJvm(2);
  }
  initializeTask();
  // TODO: Let the task run and create files.
  // create files and set permissions 555. Verify if task controller sets
  // the permissions for TT to delete the task dir or work dir properly
  validateRemoveTaskFiles(needCleanup, jvmReuse, tip);
}
/**
 * Verifies userlogs cleanup: the job's userlog dir must exist before the
 * job is purged and be gone after the cleanup thread processes it.
 *
 * @throws IOException
 */
private void verifyUserLogsRemoval()
    throws IOException {
  // verify user logs cleanup
  File jobUserLogDir = TaskLog.getJobDir(jobId);
  // Logs should be there before cleanup.
  assertTrue("Userlogs dir " + jobUserLogDir + " is not present as expected!!",
      jobUserLogDir.exists());
  tracker.purgeJob(new KillJobAction(jobId));
  tracker.getTaskLogCleanupThread().processCompletedJobs();
  // Logs should be gone after cleanup.
  assertFalse("Userlogs dir " + jobUserLogDir + " is not deleted as expected!!",
      jobUserLogDir.exists());
}
/**
 * Test job cleanup by doing the following
 *   - create files with no write permissions to TT under job-work-dir
 *   - create files with no write permissions to TT under task-work-dir
 * then verify that removeJobFiles() deletes both trees, userlogs are
 * removed, and the empty per-user dirs survive.
 */
public void testJobFilesRemoval() throws IOException, InterruptedException {
  if (!canRun()) {
    return;
  }
  LOG.info("Running testJobCleanup()");
  // Localize job and localize task.
  TaskTracker.RunningJob rjob = tracker.localizeJob(tip);
  localizedJobConf = rjob.getJobConf();
  // Set an inline cleanup queue so deletions happen synchronously.
  InlineCleanupQueue cleanupQueue = new InlineCleanupQueue();
  tracker.setCleanupThread(cleanupQueue);
  // Create a file in job's work-dir with 555
  String jobWorkDir =
      TaskTracker.getJobWorkDir(task.getUser(), task.getJobID().toString());
  Path[] jPaths = tracker.getLocalFiles(localizedJobConf, jobWorkDir);
  assertTrue("No paths found for job", jPaths.length > 0);
  for (Path p : jPaths) {
    if (tracker.getLocalFileSystem().exists(p)) {
      createFileAndSetPermissions(localizedJobConf, p);
    }
  }
  // Initialize task dirs
  tip.setJobConf(localizedJobConf);
  tip.localizeTask(task);
  // Create a file in task local dir with 555
  // this is to simply test the case where the jvm reuse is enabled and some
  // files in task-attempt-local-dir are left behind to be cleaned up when the
  // job finishes.
  String taskLocalDir =
      TaskTracker.getLocalTaskDir(task.getUser(), task.getJobID().toString(),
          task.getTaskID().toString(), false);
  Path[] tPaths = tracker.getLocalFiles(localizedJobConf, taskLocalDir);
  assertTrue("No paths found for task", tPaths.length > 0);
  for (Path p : tPaths) {
    if (tracker.getLocalFileSystem().exists(p)) {
      createFileAndSetPermissions(localizedJobConf, p);
    }
  }
  // remove the job work dir — the method under test
  tracker.removeJobFiles(task.getUser(), task.getJobID());
  // check the task-local-dir
  boolean tLocalDirExists = false;
  for (Path p : tPaths) {
    if (tracker.getLocalFileSystem().exists(p)) {
      tLocalDirExists = true;
    }
  }
  assertFalse("Task " + task.getTaskID() + " local dir exists after cleanup",
      tLocalDirExists);
  // Verify that the TaskTracker (via the task-controller) cleans up the dirs.
  // check the job-work-dir
  boolean jWorkDirExists = false;
  for (Path p : jPaths) {
    if (tracker.getLocalFileSystem().exists(p)) {
      jWorkDirExists = true;
    }
  }
  assertFalse("Job " + task.getJobID() + " work dir exists after cleanup",
      jWorkDirExists);
  // Test userlogs cleanup.
  verifyUserLogsRemoval();
  // Check that the empty $mapred.local.dir/taskTracker/$user dirs are still
  // there.
  for (String localDir : localDirs) {
    Path userDir =
        new Path(localDir, TaskTracker.getUserDir(task.getUser()));
    assertTrue("User directory " + userDir + " is not present!!",
        tracker.getLocalFileSystem().exists(userDir));
  }
}
  /**
   * Tests TaskTracker restart after the localization.
   *
   * This tests the following steps:
   *
   * Localize Job, initialize a task.
   * Then restart the Tracker.
   * launch a cleanup attempt for the task.
   *
   * Both job and task localization must succeed again on the restarted
   * tracker; this is validated via checkJobLocalization() and
   * checkTaskLocalization().
   *
   * @throws IOException
   * @throws InterruptedException
   */
  public void testTrackerRestart() throws IOException, InterruptedException {
    if (!canRun()) {
      return;
    }
    // Localize job and localize task.
    TaskTracker.RunningJob rjob = tracker.localizeJob(tip);
    localizedJobConf = rjob.getJobConf();
    initializeTask();
    // imitate tracker restart
    startTracker();
    // create a task cleanup attempt
    createTask();
    task.setTaskCleanupTask();
    // register task
    tip = tracker.new TaskInProgress(task, trackerFConf);
    // localize the job again.
    rjob = tracker.localizeJob(tip);
    localizedJobConf = rjob.getJobConf();
    checkJobLocalization();
    // localize task cleanup attempt
    initializeTask();
    checkTaskLocalization();
  }
  /**
   * Tests TaskTracker re-init after the localization.
   *
   * This tests the following steps:
   *
   * Localize Job, initialize a task.
   * Then reinit the Tracker.
   * launch a cleanup attempt for the task.
   *
   * Mirrors testTrackerRestart() but goes through initializeTracker()
   * instead of startTracker().
   *
   * @throws IOException
   * @throws InterruptedException
   */
  public void testTrackerReinit() throws IOException, InterruptedException {
    if (!canRun()) {
      return;
    }
    // Localize job and localize task.
    TaskTracker.RunningJob rjob = tracker.localizeJob(tip);
    localizedJobConf = rjob.getJobConf();
    initializeTask();
    // imitate tracker reinit
    initializeTracker();
    // create a task cleanup attempt
    createTask();
    task.setTaskCleanupTask();
    // register task
    tip = tracker.new TaskInProgress(task, trackerFConf);
    // localize the job again.
    rjob = tracker.localizeJob(tip);
    localizedJobConf = rjob.getJobConf();
    checkJobLocalization();
    // localize task cleanup attempt
    initializeTask();
    checkTaskLocalization();
  }
  /**
   * Localizes a cleanup task and validates permissions.
   *
   * The task is flagged as a task-cleanup attempt before job and task
   * localization, then the standard localization checks are run.
   *
   * @throws InterruptedException
   * @throws IOException
   */
  public void testCleanupTaskLocalization() throws IOException,
      InterruptedException {
    if (!canRun()) {
      return;
    }
    task.setTaskCleanupTask();
    // register task
    tip = tracker.new TaskInProgress(task, trackerFConf);
    // localize the job.
    RunningJob rjob = tracker.localizeJob(tip);
    localizedJobConf = rjob.getJobConf();
    checkJobLocalization();
    // localize task cleanup attempt
    initializeTask();
    checkTaskLocalization();
  }
}
|
apache/hive | 37,555 | hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/AbstractHCatLoaderTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.hive.hcatalog.pig;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.RandomAccessFile;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import org.apache.commons.io.FileUtils;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.common.type.Date;
import org.apache.hadoop.hive.common.type.Timestamp;
import org.apache.hadoop.hive.ql.IDriver;
import org.apache.hadoop.hive.serde2.ColumnProjectionUtils;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hive.hcatalog.HcatTestUtils;
import org.apache.hive.hcatalog.common.HCatConstants;
import org.apache.hive.hcatalog.common.HCatUtil;
import org.apache.hive.hcatalog.data.Pair;
import org.apache.hive.hcatalog.data.schema.HCatFieldSchema;
import org.apache.hive.hcatalog.mapreduce.HCatBaseTest;
import org.apache.pig.PigRunner;
import org.apache.pig.PigServer;
import org.apache.pig.ResourceStatistics;
import org.apache.pig.data.DataType;
import org.apache.pig.data.Tuple;
import org.apache.pig.impl.logicalLayer.schema.Schema;
import org.apache.pig.impl.logicalLayer.schema.Schema.FieldSchema;
import org.apache.pig.tools.pigstats.OutputStats;
import org.apache.pig.tools.pigstats.PigStats;
import org.joda.time.DateTime;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public abstract class AbstractHCatLoaderTest extends HCatBaseTest {
  private static final Logger LOG = LoggerFactory.getLogger(AbstractHCatLoaderTest.class);
  // Raw input files written under TEST_DATA_DIR and loaded into the fixture
  // tables by setUpTest().
  private static final String BASIC_FILE_NAME = TEST_DATA_DIR + "/basic.input.data";
  private static final String COMPLEX_FILE_NAME = TEST_DATA_DIR + "/complex.input.data";
  private static final String DATE_FILE_NAME = TEST_DATA_DIR + "/datetimestamp.input.data";
  // Hive tables (re)created before every test in setUpTest().
  private static final String BASIC_TABLE = "junit_unparted_basic";
  private static final String COMPLEX_TABLE = "junit_unparted_complex";
  private static final String PARTITIONED_TABLE = "junit_parted_basic";
  private static final String SPECIFIC_SIZE_TABLE = "junit_specific_size";
  private static final String SPECIFIC_DATABASE = "junit_specific_db";
  private static final String SPECIFIC_SIZE_TABLE_2 = "junit_specific_size2";
  private static final String PARTITIONED_DATE_TABLE = "junit_parted_date";
  // Expected contents of BASIC_TABLE keyed by row index: (int a, string b);
  // populated in setUpTest() and consulted by the read tests.
  private Map<Integer, Pair<Integer, String>> basicInputData;
  // Storage format used in the "stored as" clause; supplied by subclasses.
  protected String storageFormat;
  abstract String getStorageFormat();
  // NOTE(review): the constructor calls the overridable getStorageFormat();
  // this is safe only while subclass implementations return a constant and do
  // not rely on subclass state initialized after super() - confirm before
  // changing subclass implementations.
  public AbstractHCatLoaderTest() {
    this.storageFormat = getStorageFormat();
  }
  /** Drops the given table (if it exists) using the shared test driver. */
  private void dropTable(String tablename) throws Exception {
    dropTable(tablename, driver);
  }
  /** Drops the given table (if it exists) using the supplied driver. */
  static void dropTable(String tablename, IDriver driver) throws Exception {
    driver.run("drop table if exists " + tablename);
  }
  /** Creates a (possibly partitioned) table in {@code db} using this test's storage format. */
  private void createTable(String db, String tablename, String schema, String partitionedBy) throws
      Exception {
    createTable(db, tablename, schema, partitionedBy, driver, storageFormat);
  }
  /** Creates a (possibly partitioned) table in the default database using this test's storage format. */
  private void createTableDefaultDB(String tablename, String schema, String partitionedBy) throws
      Exception {
    createTable(null, tablename, schema, partitionedBy, driver, storageFormat);
  }
  /** Static variant: creates a (possibly partitioned) table in the default database. */
  static void createTableDefaultDB(String tablename, String schema, String partitionedBy, IDriver
      driver, String storageFormat) throws Exception {
    createTable(null, tablename, schema, partitionedBy, driver, storageFormat);
  }
static void createTable(String db, String tablename, String schema, String partitionedBy, IDriver
driver, String storageFormat)
throws Exception {
String createTable;
createTable = "create table " + tablename + "(" + schema + ") ";
if ((partitionedBy != null) && (!partitionedBy.trim().isEmpty())) {
createTable = createTable + "partitioned by (" + partitionedBy + ") ";
}
createTable = createTable + "stored as " +storageFormat;
//HCat doesn't support transactional tables
createTable += " TBLPROPERTIES ('transactional'='false')";
if (db != null) {
executeStatementOnDriver("create database if not exists " + db, driver);
executeStatementOnDriver("use " + db + "", driver);
} else {
executeStatementOnDriver("use default", driver);
}
executeStatementOnDriver(createTable, driver);
}
  /** Creates an unpartitioned table in database {@code db}. */
  private void createTable(String db, String tablename, String schema) throws Exception {
    createTable(db, tablename, schema, null);
  }
  /** Creates an unpartitioned table in the default database. */
  private void createTableDefaultDB(String tablename, String schema) throws Exception {
    createTable(null, tablename, schema, null);
  }
  /**
   * Execute Hive CLI statement
   * @param cmd arbitrary statement to execute
   * @param driver the driver to execute the statement on
   */
  static void executeStatementOnDriver(String cmd, IDriver driver) throws Exception {
    LOG.debug("Executing: " + cmd);
    driver.run(cmd);
  }
private static void checkProjection(FieldSchema fs, String expectedName, byte expectedPigType) {
assertEquals(fs.alias, expectedName);
assertEquals("Expected " + DataType.findTypeName(expectedPigType) + "; got " +
DataType.findTypeName(fs.type), expectedPigType, fs.type);
}
  /**
   * Creates and populates every fixture table before each test: the basic,
   * complex, partitioned (bkt=0/1), size-specific and date-partitioned tables
   * plus the all-primitive-types table. All data is written to local files
   * and loaded through HCatStorer in a single Pig batch.
   */
  @Before
  public void setUpTest() throws Exception {
    createTableDefaultDB(BASIC_TABLE, "a int, b string");
    createTableDefaultDB(COMPLEX_TABLE,
        "name string, studentid int, "
            + "contact struct<phno:string,email:string>, "
            + "currently_registered_courses array<string>, "
            + "current_grades map<string,string>, "
            + "phnos array<struct<phno:string,type:string>>");
    createTableDefaultDB(PARTITIONED_TABLE, "a int, b string", "bkt string");
    createTableDefaultDB(SPECIFIC_SIZE_TABLE, "a int, b string");
    createTable(SPECIFIC_DATABASE, SPECIFIC_SIZE_TABLE_2, "a int, b string");
    createTableDefaultDB(PARTITIONED_DATE_TABLE, "b string", "dt date");
    AllTypesTable.setupAllTypesTable(driver);
    // Build LOOP_SIZE^2 rows of "i\tSjS" and record them in basicInputData so
    // the read tests can verify values row by row.
    int LOOP_SIZE = 3;
    String[] input = new String[LOOP_SIZE * LOOP_SIZE];
    basicInputData = new HashMap<Integer, Pair<Integer, String>>();
    int k = 0;
    for (int i = 1; i <= LOOP_SIZE; i++) {
      String si = i + "";
      for (int j = 1; j <= LOOP_SIZE; j++) {
        String sj = "S" + j + "S";
        input[k] = si + "\t" + sj;
        basicInputData.put(k, new Pair<Integer, String>(i, sj));
        k++;
      }
    }
    HcatTestUtils.createTestDataFile(BASIC_FILE_NAME, input);
    HcatTestUtils.createTestDataFile(COMPLEX_FILE_NAME,
        new String[]{
            "Henry Jekyll\t42\t(415-253-6367,hjekyll@contemporary.edu.uk)\t{(PHARMACOLOGY),(PSYCHIATRY)}\t[PHARMACOLOGY#A-,PSYCHIATRY#B+]\t{(415-253-6367,cell),(408-253-6367,landline)}",
            "Edward Hyde\t1337\t(415-253-6367,anonymous@b44chan.org)\t{(CREATIVE_WRITING),(COPYRIGHT_LAW)}\t[CREATIVE_WRITING#A+,COPYRIGHT_LAW#D]\t{(415-253-6367,cell),(408-253-6367,landline)}",
        }
    );
    HcatTestUtils.createTestDataFile(DATE_FILE_NAME,
        new String[]{
            "2016-07-14 08:10:15\tHenry Jekyll",
            "2016-07-15 11:54:55\tEdward Hyde",
        }
    );
    // Load everything through HCatStorer in one batch; rows with a < 2 go to
    // partition bkt=0, the rest to bkt=1.
    PigServer server = createPigServer(false);
    server.setBatchOn();
    int i = 0;
    server.registerQuery("A = load '" + BASIC_FILE_NAME + "' as (a:int, b:chararray);", ++i);
    server.registerQuery("store A into '" + BASIC_TABLE + "' using org.apache.hive.hcatalog.pig.HCatStorer();", ++i);
    server.registerQuery("store A into '" + SPECIFIC_SIZE_TABLE + "' using org.apache.hive.hcatalog.pig.HCatStorer();", ++i);
    server.registerQuery("store A into '" + SPECIFIC_DATABASE + "." +SPECIFIC_SIZE_TABLE_2 + "' " +
        "using org.apache.hive" +".hcatalog.pig.HCatStorer();", ++i);
    server.registerQuery("B = foreach A generate a,b;", ++i);
    server.registerQuery("B2 = filter B by a < 2;", ++i);
    server.registerQuery("store B2 into '" + PARTITIONED_TABLE + "' using org.apache.hive.hcatalog.pig.HCatStorer('bkt=0');", ++i);
    server.registerQuery("C = foreach A generate a,b;", ++i);
    server.registerQuery("C2 = filter C by a >= 2;", ++i);
    server.registerQuery("store C2 into '" + PARTITIONED_TABLE + "' using org.apache.hive.hcatalog.pig.HCatStorer('bkt=1');", ++i);
    server.registerQuery("D = load '" + COMPLEX_FILE_NAME + "' as (name:chararray, studentid:int, contact:tuple(phno:chararray,email:chararray), currently_registered_courses:bag{innertup:tuple(course:chararray)}, current_grades:map[ ] , phnos :bag{innertup:tuple(phno:chararray,type:chararray)});", ++i);
    server.registerQuery("store D into '" + COMPLEX_TABLE + "' using org.apache.hive.hcatalog.pig.HCatStorer();", ++i);
    server.registerQuery("E = load '" + DATE_FILE_NAME + "' as (dt:chararray, b:chararray);", ++i);
    server.registerQuery("F = foreach E generate ToDate(dt, 'yyyy-MM-dd HH:mm:ss') as dt, b;", ++i);
    server.registerQuery("store F into '" + PARTITIONED_DATE_TABLE + "' using org.apache.hive.hcatalog.pig.HCatStorer();", ++i);
    server.executeBatch();
  }
  /**
   * Drops all fixture tables after each test and always removes
   * TEST_DATA_DIR, even if the drops fail. SPECIFIC_SIZE_TABLE_2 is removed
   * implicitly by the cascading drop of SPECIFIC_DATABASE.
   */
  @After
  public void tearDown() throws Exception {
    try {
      if (driver != null) {
        dropTable(BASIC_TABLE);
        dropTable(COMPLEX_TABLE);
        dropTable(PARTITIONED_TABLE);
        dropTable(SPECIFIC_SIZE_TABLE);
        dropTable(PARTITIONED_DATE_TABLE);
        dropTable(AllTypesTable.ALL_PRIMITIVE_TYPES_TABLE);
        executeStatementOnDriver("drop database if exists " + SPECIFIC_DATABASE + " cascade",
            driver);
      }
    } finally {
      FileUtils.deleteDirectory(new File(TEST_DATA_DIR));
    }
  }
@Test
public void testSchemaLoadBasic() throws IOException {
PigServer server = createPigServer(false);
// test that schema was loaded correctly
server.registerQuery("X = load '" + BASIC_TABLE + "' using org.apache.hive.hcatalog.pig.HCatLoader();");
Schema dumpedXSchema = server.dumpSchema("X");
List<FieldSchema> Xfields = dumpedXSchema.getFields();
assertEquals(2, Xfields.size());
assertTrue(Xfields.get(0).alias.equalsIgnoreCase("a"));
assertTrue(Xfields.get(0).type == DataType.INTEGER);
assertTrue(Xfields.get(1).alias.equalsIgnoreCase("b"));
assertTrue(Xfields.get(1).type == DataType.CHARARRAY);
}
  /**
   * Test that we properly translate data types in Hive/HCat table schema into Pig schema.
   * Delegates to the shared {@link AllTypesTable} fixture.
   */
  @Test
  public void testSchemaLoadPrimitiveTypes() throws IOException {
    AllTypesTable.testSchemaLoadPrimitiveTypes();
  }
  /**
   * Test that values from the Hive table are read properly in Pig.
   * Delegates to the shared {@link AllTypesTable} fixture.
   */
  @Test
  public void testReadDataPrimitiveTypes() throws Exception {
    AllTypesTable.testReadDataPrimitiveTypes();
  }
  /**
   * Reads BASIC_TABLE through HCatLoader and checks every (Integer, String)
   * row against the basicInputData map built in setUpTest().
   */
  @Test
  public void testReadDataBasic() throws IOException {
    PigServer server = createPigServer(false);
    server.registerQuery("X = load '" + BASIC_TABLE + "' using org.apache.hive.hcatalog.pig.HCatLoader();");
    Iterator<Tuple> XIter = server.openIterator("X");
    int numTuplesRead = 0;
    while (XIter.hasNext()) {
      Tuple t = XIter.next();
      assertEquals(2, t.size());
      assertNotNull(t.get(0));
      assertNotNull(t.get(1));
      assertTrue(t.get(0).getClass() == Integer.class);
      assertTrue(t.get(1).getClass() == String.class);
      assertEquals(t.get(0), basicInputData.get(numTuplesRead).first);
      assertEquals(t.get(1), basicInputData.get(numTuplesRead).second);
      numTuplesRead++;
    }
    // Every generated row must have been seen exactly once.
    assertEquals(basicInputData.size(), numTuplesRead);
  }
  /**
   * Verifies that HCatLoader maps COMPLEX_TABLE's complex Hive types
   * (struct, array&lt;string&gt;, map, array&lt;struct&gt;) onto the
   * corresponding Pig types (tuple, bag, map, bag of tuples) with the
   * expected aliases and nested schemas.
   */
  @Test
  public void testSchemaLoadComplex() throws IOException {
    PigServer server = createPigServer(false);
    // test that schema was loaded correctly
    server.registerQuery("K = load '" + COMPLEX_TABLE + "' using org.apache.hive.hcatalog.pig.HCatLoader();");
    Schema dumpedKSchema = server.dumpSchema("K");
    List<FieldSchema> Kfields = dumpedKSchema.getFields();
    assertEquals(6, Kfields.size());
    assertEquals(DataType.CHARARRAY, Kfields.get(0).type);
    assertEquals("name", Kfields.get(0).alias.toLowerCase());
    assertEquals(DataType.INTEGER, Kfields.get(1).type);
    assertEquals("studentid", Kfields.get(1).alias.toLowerCase());
    assertEquals(DataType.TUPLE, Kfields.get(2).type);
    assertEquals("contact", Kfields.get(2).alias.toLowerCase());
    {
      // struct<phno:string,email:string> becomes a two-field tuple of chararrays
      assertNotNull(Kfields.get(2).schema);
      assertTrue(Kfields.get(2).schema.getFields().size() == 2);
      assertTrue(Kfields.get(2).schema.getFields().get(0).type == DataType.CHARARRAY);
      assertTrue(Kfields.get(2).schema.getFields().get(0).alias.equalsIgnoreCase("phno"));
      assertTrue(Kfields.get(2).schema.getFields().get(1).type == DataType.CHARARRAY);
      assertTrue(Kfields.get(2).schema.getFields().get(1).alias.equalsIgnoreCase("email"));
    }
    assertEquals(DataType.BAG, Kfields.get(3).type);
    assertEquals("currently_registered_courses", Kfields.get(3).alias.toLowerCase());
    {
      // array<string> becomes a bag of single-chararray tuples
      assertNotNull(Kfields.get(3).schema);
      assertEquals(1, Kfields.get(3).schema.getFields().size());
      assertEquals(DataType.TUPLE, Kfields.get(3).schema.getFields().get(0).type);
      assertNotNull(Kfields.get(3).schema.getFields().get(0).schema);
      assertEquals(1, Kfields.get(3).schema.getFields().get(0).schema.getFields().size());
      assertEquals(DataType.CHARARRAY, Kfields.get(3).schema.getFields().get(0).schema.getFields().get(0).type);
      // assertEquals("course",Kfields.get(3).schema.getFields().get(0).schema.getFields().get(0).alias.toLowerCase());
      // commented out, because the name becomes "innerfield" by default - we call it "course" in pig,
      // but in the metadata, it'd be anonymous, so this would be autogenerated, which is fine
    }
    assertEquals(DataType.MAP, Kfields.get(4).type);
    assertEquals("current_grades", Kfields.get(4).alias.toLowerCase());
    assertEquals(DataType.BAG, Kfields.get(5).type);
    assertEquals("phnos", Kfields.get(5).alias.toLowerCase());
    {
      // array<struct<phno,type>> becomes a bag of two-field chararray tuples
      assertNotNull(Kfields.get(5).schema);
      assertEquals(1, Kfields.get(5).schema.getFields().size());
      assertEquals(DataType.TUPLE, Kfields.get(5).schema.getFields().get(0).type);
      assertNotNull(Kfields.get(5).schema.getFields().get(0).schema);
      assertTrue(Kfields.get(5).schema.getFields().get(0).schema.getFields().size() == 2);
      assertEquals(DataType.CHARARRAY, Kfields.get(5).schema.getFields().get(0).schema.getFields().get(0).type);
      assertEquals("phno", Kfields.get(5).schema.getFields().get(0).schema.getFields().get(0).alias.toLowerCase());
      assertEquals(DataType.CHARARRAY, Kfields.get(5).schema.getFields().get(0).schema.getFields().get(1).type);
      assertEquals("type", Kfields.get(5).schema.getFields().get(0).schema.getFields().get(1).alias.toLowerCase());
    }
  }
  /**
   * Reads PARTITIONED_TABLE through HCatLoader and cross-checks against the
   * Hive driver: the partition column bkt is exposed as a third chararray
   * field, rows with a &lt; 2 live in bkt=0 (3 rows) and the rest in bkt=1
   * (6 rows), both reachable via partition-filter loads.
   */
  @Test
  public void testReadPartitionedBasic() throws Exception {
    PigServer server = createPigServer(false);
    driver.run("select * from " + PARTITIONED_TABLE);
    ArrayList<String> valuesReadFromHiveDriver = new ArrayList<String>();
    driver.getResults(valuesReadFromHiveDriver);
    assertEquals(basicInputData.size(), valuesReadFromHiveDriver.size());
    server.registerQuery("W = load '" + PARTITIONED_TABLE + "' using org.apache.hive.hcatalog.pig.HCatLoader();");
    Schema dumpedWSchema = server.dumpSchema("W");
    List<FieldSchema> Wfields = dumpedWSchema.getFields();
    assertEquals(3, Wfields.size());
    assertTrue(Wfields.get(0).alias.equalsIgnoreCase("a"));
    assertTrue(Wfields.get(0).type == DataType.INTEGER);
    assertTrue(Wfields.get(1).alias.equalsIgnoreCase("b"));
    assertTrue(Wfields.get(1).type == DataType.CHARARRAY);
    assertTrue(Wfields.get(2).alias.equalsIgnoreCase("bkt"));
    assertTrue(Wfields.get(2).type == DataType.CHARARRAY);
    Iterator<Tuple> WIter = server.openIterator("W");
    Collection<Pair<Integer, String>> valuesRead = new ArrayList<Pair<Integer, String>>();
    while (WIter.hasNext()) {
      Tuple t = WIter.next();
      assertTrue(t.size() == 3);
      assertNotNull(t.get(0));
      assertNotNull(t.get(1));
      assertNotNull(t.get(2));
      assertTrue(t.get(0).getClass() == Integer.class);
      assertTrue(t.get(1).getClass() == String.class);
      assertTrue(t.get(2).getClass() == String.class);
      valuesRead.add(new Pair<Integer, String>((Integer) t.get(0), (String) t.get(1)));
      // Partition placement follows the filters used in setUpTest(): a < 2
      // was stored into bkt=0, a >= 2 into bkt=1.
      if ((Integer) t.get(0) < 2) {
        assertEquals("0", t.get(2));
      } else {
        assertEquals("1", t.get(2));
      }
    }
    assertEquals(valuesReadFromHiveDriver.size(), valuesRead.size());
    // Partition-filter load on bkt=0: only the three rows with a == 1.
    server.registerQuery("P1 = load '" + PARTITIONED_TABLE + "' using org.apache.hive.hcatalog.pig.HCatLoader();");
    server.registerQuery("P1filter = filter P1 by bkt == '0';");
    Iterator<Tuple> P1Iter = server.openIterator("P1filter");
    int count1 = 0;
    while (P1Iter.hasNext()) {
      Tuple t = P1Iter.next();
      assertEquals("0", t.get(2));
      assertEquals(1, t.get(0));
      count1++;
    }
    assertEquals(3, count1);
    // Partition-filter load on bkt=1: the six rows with a > 1.
    server.registerQuery("P2 = load '" + PARTITIONED_TABLE + "' using org.apache.hive.hcatalog.pig.HCatLoader();");
    server.registerQuery("P2filter = filter P2 by bkt == '1';");
    Iterator<Tuple> P2Iter = server.openIterator("P2filter");
    int count2 = 0;
    while (P2Iter.hasNext()) {
      Tuple t = P2Iter.next();
      assertEquals("1", t.get(2));
      assertTrue(((Integer) t.get(0)) > 1);
      count2++;
    }
    assertEquals(6, count2);
  }
@Test
public void testReadMissingPartitionBasicNeg() throws Exception {
PigServer server = createPigServer(false);
File removedPartitionDir = new File(TEST_WAREHOUSE_DIR + "/" + PARTITIONED_TABLE + "/bkt=0");
if (!removeDirectory(removedPartitionDir)) {
System.out.println("Test did not run because its environment could not be set.");
return;
}
driver.run("select * from " + PARTITIONED_TABLE);
ArrayList<String> valuesReadFromHiveDriver = new ArrayList<String>();
driver.getResults(valuesReadFromHiveDriver);
assertTrue(valuesReadFromHiveDriver.size() == 6);
server.registerQuery("W = load '" + PARTITIONED_TABLE + "' using org.apache.hive.hcatalog.pig.HCatLoader();");
Schema dumpedWSchema = server.dumpSchema("W");
List<FieldSchema> Wfields = dumpedWSchema.getFields();
assertEquals(3, Wfields.size());
assertTrue(Wfields.get(0).alias.equalsIgnoreCase("a"));
assertTrue(Wfields.get(0).type == DataType.INTEGER);
assertTrue(Wfields.get(1).alias.equalsIgnoreCase("b"));
assertTrue(Wfields.get(1).type == DataType.CHARARRAY);
assertTrue(Wfields.get(2).alias.equalsIgnoreCase("bkt"));
assertTrue(Wfields.get(2).type == DataType.CHARARRAY);
try {
Iterator<Tuple> WIter = server.openIterator("W");
fail("Should failed in retriving an invalid partition");
} catch (IOException ioe) {
// expected
}
}
private static boolean removeDirectory(File dir) {
boolean success = false;
if (dir.isDirectory()) {
File[] files = dir.listFiles();
if (files != null && files.length > 0) {
for (File file : files) {
success = removeDirectory(file);
if (!success) {
return false;
}
}
}
success = dir.delete();
} else {
success = dir.delete();
}
return success;
}
  /**
   * Verifies column projection through foreach/generate: Y2 keeps only column
   * a, Y3 reorders to (b, a); both schemas and all row values must match the
   * data recorded in basicInputData.
   */
  @Test
  public void testProjectionsBasic() throws IOException {
    PigServer server = createPigServer(false);
    // projections are handled by using generate, not "as" on the Load
    server.registerQuery("Y1 = load '" + BASIC_TABLE + "' using org.apache.hive.hcatalog.pig.HCatLoader();");
    server.registerQuery("Y2 = foreach Y1 generate a;");
    server.registerQuery("Y3 = foreach Y1 generate b,a;");
    Schema dumpedY2Schema = server.dumpSchema("Y2");
    Schema dumpedY3Schema = server.dumpSchema("Y3");
    List<FieldSchema> Y2fields = dumpedY2Schema.getFields();
    List<FieldSchema> Y3fields = dumpedY3Schema.getFields();
    assertEquals(1, Y2fields.size());
    assertEquals("a", Y2fields.get(0).alias.toLowerCase());
    assertEquals(DataType.INTEGER, Y2fields.get(0).type);
    assertEquals(2, Y3fields.size());
    assertEquals("b", Y3fields.get(0).alias.toLowerCase());
    assertEquals(DataType.CHARARRAY, Y3fields.get(0).type);
    assertEquals("a", Y3fields.get(1).alias.toLowerCase());
    assertEquals(DataType.INTEGER, Y3fields.get(1).type);
    // NOTE(review): the assertEquals calls below pass (actual, expected) in
    // reversed order; harmless when they pass but misleading on failure.
    int numTuplesRead = 0;
    Iterator<Tuple> Y2Iter = server.openIterator("Y2");
    while (Y2Iter.hasNext()) {
      Tuple t = Y2Iter.next();
      assertEquals(t.size(), 1);
      assertNotNull(t.get(0));
      assertTrue(t.get(0).getClass() == Integer.class);
      assertEquals(t.get(0), basicInputData.get(numTuplesRead).first);
      numTuplesRead++;
    }
    numTuplesRead = 0;
    Iterator<Tuple> Y3Iter = server.openIterator("Y3");
    while (Y3Iter.hasNext()) {
      Tuple t = Y3Iter.next();
      assertEquals(t.size(), 2);
      assertNotNull(t.get(0));
      assertTrue(t.get(0).getClass() == String.class);
      assertEquals(t.get(0), basicInputData.get(numTuplesRead).second);
      assertNotNull(t.get(1));
      assertTrue(t.get(1).getClass() == Integer.class);
      assertEquals(t.get(1), basicInputData.get(numTuplesRead).first);
      numTuplesRead++;
    }
    assertEquals(basicInputData.size(), numTuplesRead);
  }
@Test
public void testColumnarStorePushdown() throws Exception {
String PIGOUTPUT_DIR = TEST_DATA_DIR+ "/colpushdownop";
String PIG_FILE = "test.pig";
String expectedCols = "0,1";
PrintWriter w = new PrintWriter(new FileWriter(PIG_FILE));
w.println("A = load '" + COMPLEX_TABLE + "' using org.apache.hive.hcatalog.pig.HCatLoader();");
w.println("B = foreach A generate name,studentid;");
w.println("C = filter B by name is not null;");
w.println("store C into '" + PIGOUTPUT_DIR + "' using PigStorage();");
w.close();
try {
String[] args = { "-x", "local", PIG_FILE };
PigStats stats = PigRunner.run(args, null);
//Pig script was successful
assertTrue(stats.isSuccessful());
//Single MapReduce job is launched
OutputStats outstats = stats.getOutputStats().get(0);
assertTrue(outstats!= null);
assertEquals(expectedCols,outstats.getConf()
.get(ColumnProjectionUtils.READ_COLUMN_IDS_CONF_STR));
//delete output file on exit
FileSystem fs = FileSystem.get(outstats.getConf());
if (fs.exists(new Path(PIGOUTPUT_DIR))) {
fs.delete(new Path(PIGOUTPUT_DIR), true);
}
}finally {
new File(PIG_FILE).delete();
}
}
  /**
   * Tests the failure case caused by HIVE-10752
   *
   * Loads COMPLEX_TABLE twice, joins the two relations on name, and projects
   * a column of the second loader only; projection pushdown must not corrupt
   * the join (studentids 42 and 1337 come back in ascending order).
   * @throws Exception
   */
  @Test
  public void testColumnarStorePushdown2() throws Exception {
    PigServer server = createPigServer(false);
    server.registerQuery("A = load '" + COMPLEX_TABLE + "' using org.apache.hive.hcatalog.pig.HCatLoader();");
    server.registerQuery("B = load '" + COMPLEX_TABLE + "' using org.apache.hive.hcatalog.pig.HCatLoader();");
    server.registerQuery("C = join A by name, B by name;");
    server.registerQuery("D = foreach C generate B::studentid;");
    server.registerQuery("E = ORDER D by studentid asc;");
    Iterator<Tuple> iter = server.openIterator("E");
    // The two fixture rows carry studentids 42 (Jekyll) and 1337 (Hyde).
    Tuple t = iter.next();
    assertEquals(42, t.get(0));
    t = iter.next();
    assertEquals(1337, t.get(0));
  }
@Test
public void testGetInputBytes() throws Exception {
File file = new File(TEST_WAREHOUSE_DIR + "/" + SPECIFIC_SIZE_TABLE + "/part-m-00000");
file.deleteOnExit();
RandomAccessFile randomAccessFile = new RandomAccessFile(file, "rw");
randomAccessFile.setLength(2L * 1024 * 1024 * 1024);
randomAccessFile.close();
Job job = new Job();
HCatLoader hCatLoader = new HCatLoader();
hCatLoader.setUDFContextSignature("testGetInputBytes");
hCatLoader.setLocation(SPECIFIC_SIZE_TABLE, job);
ResourceStatistics statistics = hCatLoader.getStatistics(SPECIFIC_SIZE_TABLE, job);
assertEquals(2048, (long) statistics.getmBytes());
}
/**
* Simulates Pig relying on HCatLoader to inform about input size of multiple tables.
* @throws Exception
*/
@Test
public void testGetInputBytesMultipleTables() throws Exception {
File file = new File(TEST_WAREHOUSE_DIR + "/" + SPECIFIC_SIZE_TABLE + "/part-m-00000");
file.deleteOnExit();
RandomAccessFile randomAccessFile = new RandomAccessFile(file, "rw");
randomAccessFile.setLength(987654321L);
randomAccessFile.close();
file = new File(TEST_WAREHOUSE_DIR + "/" + SPECIFIC_DATABASE + ".db/" +
SPECIFIC_SIZE_TABLE_2 + "/part-m-00000");
file.deleteOnExit();
randomAccessFile = new RandomAccessFile(file, "rw");
randomAccessFile.setLength(12345678L);
randomAccessFile.close();
Job job = new Job();
HCatLoader hCatLoader = new HCatLoader();
//Mocking that Pig would assign different signature for each POLoad operator
hCatLoader.setUDFContextSignature("testGetInputBytesMultipleTables" + SPECIFIC_SIZE_TABLE);
hCatLoader.setLocation(SPECIFIC_SIZE_TABLE, job);
HCatLoader hCatLoader2 = new HCatLoader();
hCatLoader2.setUDFContextSignature("testGetInputBytesMultipleTables" + SPECIFIC_SIZE_TABLE_2);
hCatLoader2.setLocation(SPECIFIC_DATABASE + "." + SPECIFIC_SIZE_TABLE_2, job);
HCatLoader hCatLoader3 = new HCatLoader();
hCatLoader3.setUDFContextSignature("testGetInputBytesMultipleTables" + PARTITIONED_TABLE);
hCatLoader3.setLocation(PARTITIONED_TABLE, job);
long specificTableSize = -1;
long specificTableSize2 = -1;
long partitionedTableSize = -1;
ResourceStatistics statistics = hCatLoader.getStatistics(SPECIFIC_SIZE_TABLE, job);
specificTableSize=statistics.getSizeInBytes();
assertEquals(987654321, specificTableSize);
statistics = hCatLoader2.getStatistics(SPECIFIC_SIZE_TABLE_2, job);
specificTableSize2=statistics.getSizeInBytes();
assertEquals(12345678, specificTableSize2);
statistics = hCatLoader3.getStatistics(PARTITIONED_TABLE, job);
partitionedTableSize=statistics.getSizeInBytes();
//Partitioned table size here is dependent on underlying storage format, it's ~ 20<x<2000
assertTrue(20 < partitionedTableSize && partitionedTableSize < 2000);
//No-op here, just a reminder that Pig would do the calculation of the sum itself
//e.g. when joining the 3 tables is requested
assertTrue(Math.abs((specificTableSize + specificTableSize2 + partitionedTableSize) -
(987654321+12345678+1010)) < 1010);
}
  /**
   * Reads a boolean column through HCatLoader with
   * HCAT_DATA_CONVERT_BOOLEAN_TO_INTEGER enabled: the declared schema type
   * depends on whether this Pig version supports booleans, but the values
   * must come back as integers 1/0.
   */
  @Test
  public void testConvertBooleanToInt() throws Exception {
    String tbl = "test_convert_boolean_to_int";
    String inputFileName = TEST_DATA_DIR + "/testConvertBooleanToInt/data.txt";
    File inputDataDir = new File(inputFileName).getParentFile();
    inputDataDir.mkdir();
    String[] lines = new String[]{"llama\ttrue", "alpaca\tfalse"};
    HcatTestUtils.createTestDataFile(inputFileName, lines);
    driver.run("drop table if exists " + tbl);
    // External text table over the freshly written file; backslashes are
    // normalized so the location URI also works on Windows paths.
    driver.run("create external table " + tbl +
        " (a string, b boolean) row format delimited fields terminated by '\t'" +
        " stored as textfile location 'file:///" + inputDataDir.getPath().replaceAll("\\\\", "/") + "'");
    Properties properties = new Properties();
    properties.setProperty(HCatConstants.HCAT_DATA_CONVERT_BOOLEAN_TO_INTEGER, "true");
    properties.put("stop.on.failure", Boolean.TRUE.toString());
    PigServer server = createPigServer(true, properties);
    server.registerQuery(
        "data = load 'test_convert_boolean_to_int' using org.apache.hive.hcatalog.pig.HCatLoader();");
    Schema schema = server.dumpSchema("data");
    assertEquals(2, schema.getFields().size());
    assertEquals("a", schema.getField(0).alias);
    assertEquals(DataType.CHARARRAY, schema.getField(0).type);
    assertEquals("b", schema.getField(1).alias);
    if (PigHCatUtil.pigHasBooleanSupport()){
      assertEquals(DataType.BOOLEAN, schema.getField(1).type);
    } else {
      assertEquals(DataType.INTEGER, schema.getField(1).type);
    }
    Iterator<Tuple> iterator = server.openIterator("data");
    Tuple t = iterator.next();
    assertEquals("llama", t.get(0));
    assertEquals(1, t.get(1));
    t = iterator.next();
    assertEquals("alpaca", t.get(0));
    assertEquals(0, t.get(1));
    assertFalse(iterator.hasNext());
  }
/**
* Test if we can read a date partitioned table
*/
@Test
public void testDatePartitionPushUp() throws Exception {
PigServer server = createPigServer(false);
server.registerQuery("X = load '" + PARTITIONED_DATE_TABLE + "' using " + HCatLoader.class.getName() + "();");
server.registerQuery("Y = filter X by dt == ToDate('2016-07-14','yyyy-MM-dd');");
Iterator<Tuple> YIter = server.openIterator("Y");
int numTuplesRead = 0;
while (YIter.hasNext()) {
Tuple t = YIter.next();
assertEquals(t.size(), 2);
numTuplesRead++;
}
assertTrue("Expected " + 1 + "; found " + numTuplesRead, numTuplesRead == 1);
}
/**
* basic tests that cover each scalar type
* https://issues.apache.org/jira/browse/HIVE-5814
*/
protected static final class AllTypesTable {
    // Raw input file and table name for the all-primitive-types fixture.
    private static final String ALL_TYPES_FILE_NAME = TEST_DATA_DIR + "/alltypes.input.data";
    private static final String ALL_PRIMITIVE_TYPES_TABLE = "junit_unparted_alltypes";
    // Column list covering every primitive Hive type; the trailing comment on
    // each line is the column's ordinal, matching the positions used in
    // primitiveRows and in the checkProjection() calls.
    private static final String ALL_TYPES_SCHEMA = "( c_boolean boolean, " + //0
        "c_tinyint tinyint, " + //1
        "c_smallint smallint, " + //2
        "c_int int, " + //3
        "c_bigint bigint, " + //4
        "c_float float, " + //5
        "c_double double, " + //6
        "c_decimal decimal(5,2), " +//7
        "c_string string, " + //8
        "c_char char(10), " + //9
        "c_varchar varchar(20), " + //10
        "c_binary binary, " + //11
        "c_date date, " + //12
        "c_timestamp timestamp)"; //13
    /**
     * raw data for #ALL_PRIMITIVE_TYPES_TABLE
     * All the values are within range of target data type (column);
     * the first row carries maximum values, the second minimum values,
     * for each numeric type.
     */
    private static final Object[][] primitiveRows = new Object[][] {
        {Boolean.TRUE,Byte.MAX_VALUE,Short.MAX_VALUE, Integer.MAX_VALUE,Long.MAX_VALUE,Float.MAX_VALUE,Double.MAX_VALUE,555.22,"Kyiv","char(10)xx","varchar(20)","blah".getBytes(), Date.valueOf("2014-01-13"), Timestamp.valueOf("2014-01-13 19:26:25.0123")},
        {Boolean.FALSE,Byte.MIN_VALUE,Short.MIN_VALUE, Integer.MIN_VALUE,Long.MIN_VALUE,Float.MIN_VALUE,Double.MIN_VALUE,-555.22,"Saint Petersburg","char(xx)00","varchar(yy)","doh".getBytes(),Date.valueOf("2014-01-14"), Timestamp.valueOf("2014-01-14 19:26:25.0123")}
    };
/**
* Test that we properly translate data types in Hive/HCat table schema into Pig schema
*/
static void testSchemaLoadPrimitiveTypes() throws IOException {
PigServer server = createPigServer(false);
server.registerQuery("X = load '" + ALL_PRIMITIVE_TYPES_TABLE + "' using " + HCatLoader.class.getName() + "();");
Schema dumpedXSchema = server.dumpSchema("X");
List<FieldSchema> Xfields = dumpedXSchema.getFields();
assertEquals("Expected " + HCatFieldSchema.Type.numPrimitiveTypes() + " fields, found " +
Xfields.size(), HCatFieldSchema.Type.numPrimitiveTypes(), Xfields.size());
checkProjection(Xfields.get(0), "c_boolean", DataType.BOOLEAN);
checkProjection(Xfields.get(1), "c_tinyint", DataType.INTEGER);
checkProjection(Xfields.get(2), "c_smallint", DataType.INTEGER);
checkProjection(Xfields.get(3), "c_int", DataType.INTEGER);
checkProjection(Xfields.get(4), "c_bigint", DataType.LONG);
checkProjection(Xfields.get(5), "c_float", DataType.FLOAT);
checkProjection(Xfields.get(6), "c_double", DataType.DOUBLE);
checkProjection(Xfields.get(7), "c_decimal", DataType.BIGDECIMAL);
checkProjection(Xfields.get(8), "c_string", DataType.CHARARRAY);
checkProjection(Xfields.get(9), "c_char", DataType.CHARARRAY);
checkProjection(Xfields.get(10), "c_varchar", DataType.CHARARRAY);
checkProjection(Xfields.get(11), "c_binary", DataType.BYTEARRAY);
checkProjection(Xfields.get(12), "c_date", DataType.DATETIME);
checkProjection(Xfields.get(13), "c_timestamp", DataType.DATETIME);
}
/**
* Test that value from Hive table are read properly in Pig
*/
private static void testReadDataPrimitiveTypes() throws Exception {
// testConvertBooleanToInt() sets HCatConstants.HCAT_DATA_CONVERT_BOOLEAN_TO_INTEGER=true, and
// might be the last one to call HCatContext.INSTANCE.setConf(). Make sure setting is false.
Properties properties = new Properties();
properties.setProperty(HCatConstants.HCAT_DATA_CONVERT_BOOLEAN_TO_INTEGER, "false");
PigServer server = createPigServer(false, properties);
server.registerQuery("X = load '" + ALL_PRIMITIVE_TYPES_TABLE + "' using " + HCatLoader.class.getName() + "();");
Iterator<Tuple> XIter = server.openIterator("X");
int numTuplesRead = 0;
while (XIter.hasNext()) {
Tuple t = XIter.next();
assertEquals(HCatFieldSchema.Type.numPrimitiveTypes(), t.size());
int colPos = 0;
for (Object referenceData : primitiveRows[numTuplesRead]) {
if (referenceData == null) {
assertTrue("rowNum=" + numTuplesRead + " colNum=" + colPos
+ " Reference data is null; actual "
+ t.get(colPos), t.get(colPos) == null);
} else if (referenceData instanceof Date) {
// Note that here we ignore nanos part of Hive Timestamp since nanos are dropped when
// reading Hive from Pig by design.
assertTrue("rowNum=" + numTuplesRead + " colNum=" + colPos
+ " Reference data=" + ((Date)referenceData).toEpochMilli()
+ " actual=" + ((DateTime)t.get(colPos)).getMillis()
+ "; types=(" + referenceData.getClass() + "," + t.get(colPos).getClass() + ")",
((Date)referenceData).toEpochMilli() == ((DateTime)t.get(colPos)).getMillis());
} else if (referenceData instanceof Timestamp) {
// Note that here we ignore nanos part of Hive Timestamp since nanos are dropped when
// reading Hive from Pig by design.
assertTrue("rowNum=" + numTuplesRead + " colNum=" + colPos
+ " Reference data=" + ((Timestamp)referenceData).toEpochMilli()
+ " actual=" + ((DateTime)t.get(colPos)).getMillis()
+ "; types=(" + referenceData.getClass() + "," + t.get(colPos).getClass() + ")",
((Timestamp)referenceData).toEpochMilli()== ((DateTime)t.get(colPos)).getMillis());
} else {
// Doing String comps here as value objects in Hive in Pig are different so equals()
// doesn't work.
assertTrue("rowNum=" + numTuplesRead + " colNum=" + colPos
+ " Reference data=" + referenceData + " actual=" + t.get(colPos)
+ "; types=(" + referenceData.getClass() + "," + t.get(colPos).getClass() + ") ",
referenceData.toString().equals(t.get(colPos).toString()));
}
colPos++;
}
numTuplesRead++;
}
assertTrue("Expected " + primitiveRows.length + "; found " + numTuplesRead, numTuplesRead == primitiveRows.length);
}
private static void setupAllTypesTable(IDriver driver) throws Exception {
String[] primitiveData = new String[primitiveRows.length];
for (int i = 0; i < primitiveRows.length; i++) {
Object[] rowData = primitiveRows[i];
StringBuilder row = new StringBuilder();
for (Object cell : rowData) {
row.append(row.length() == 0 ? "" : "\t").append(cell == null ? null : cell);
}
primitiveData[i] = row.toString();
}
HcatTestUtils.createTestDataFile(ALL_TYPES_FILE_NAME, primitiveData);
String cmd = "create table " + ALL_PRIMITIVE_TYPES_TABLE + ALL_TYPES_SCHEMA +
"ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t'" +
" STORED AS TEXTFILE";
executeStatementOnDriver(cmd, driver);
cmd = "load data local inpath '" + HCatUtil.makePathASafeFileName(ALL_TYPES_FILE_NAME) +
"' into table " + ALL_PRIMITIVE_TYPES_TABLE;
executeStatementOnDriver(cmd, driver);
}
}
}
|
apache/ozone | 38,109 | hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/impl/TestHddsDispatcher.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.ozone.container.common.impl;
import static java.nio.charset.StandardCharsets.UTF_8;
import static org.apache.hadoop.hdds.fs.MockSpaceUsagePersistence.inMemory;
import static org.apache.hadoop.hdds.scm.ScmConfigKeys.HDDS_DATANODE_DIR_KEY;
import static org.apache.hadoop.hdds.scm.protocolPB.ContainerCommandResponseBuilders.getContainerCommandResponse;
import static org.apache.hadoop.ozone.container.common.ContainerTestUtils.COMMIT_STAGE;
import static org.apache.hadoop.ozone.container.common.impl.ContainerImplTestUtils.newContainerSet;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.Mockito.any;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import com.google.common.collect.Maps;
import java.io.File;
import java.io.IOException;
import java.nio.file.Path;
import java.time.Duration;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.commons.codec.digest.DigestUtils;
import org.apache.commons.lang3.RandomUtils;
import org.apache.hadoop.conf.StorageUnit;
import org.apache.hadoop.hdds.client.BlockID;
import org.apache.hadoop.hdds.conf.OzoneConfiguration;
import org.apache.hadoop.hdds.fs.MockSpaceUsageCheckFactory;
import org.apache.hadoop.hdds.fs.MockSpaceUsageSource;
import org.apache.hadoop.hdds.fs.SpaceUsageCheckFactory;
import org.apache.hadoop.hdds.fs.SpaceUsageSource;
import org.apache.hadoop.hdds.protocol.DatanodeDetails;
import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos;
import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.ContainerCommandRequestProto;
import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.ContainerCommandRequestProtoOrBuilder;
import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.ContainerCommandResponseProto;
import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.ContainerType;
import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.WriteChunkRequestProto;
import org.apache.hadoop.hdds.protocol.proto.StorageContainerDatanodeProtocolProtos.ContainerAction;
import org.apache.hadoop.hdds.scm.container.common.helpers.StorageContainerException;
import org.apache.hadoop.hdds.security.token.TokenVerifier;
import org.apache.hadoop.ozone.OzoneConfigKeys;
import org.apache.hadoop.ozone.common.Checksum;
import org.apache.hadoop.ozone.common.ChecksumData;
import org.apache.hadoop.ozone.common.OzoneChecksumException;
import org.apache.hadoop.ozone.common.utils.BufferUtils;
import org.apache.hadoop.ozone.container.ContainerTestHelper;
import org.apache.hadoop.ozone.container.checksum.ContainerChecksumTreeManager;
import org.apache.hadoop.ozone.container.common.ContainerTestUtils;
import org.apache.hadoop.ozone.container.common.helpers.ContainerMetrics;
import org.apache.hadoop.ozone.container.common.interfaces.Container;
import org.apache.hadoop.ozone.container.common.interfaces.Handler;
import org.apache.hadoop.ozone.container.common.interfaces.VolumeChoosingPolicy;
import org.apache.hadoop.ozone.container.common.report.IncrementalReportSender;
import org.apache.hadoop.ozone.container.common.statemachine.DatanodeConfiguration;
import org.apache.hadoop.ozone.container.common.statemachine.DatanodeStateMachine;
import org.apache.hadoop.ozone.container.common.statemachine.StateContext;
import org.apache.hadoop.ozone.container.common.transport.server.ratis.DispatcherContext;
import org.apache.hadoop.ozone.container.common.transport.server.ratis.DispatcherContext.Op;
import org.apache.hadoop.ozone.container.common.transport.server.ratis.DispatcherContext.WriteChunkStage;
import org.apache.hadoop.ozone.container.common.utils.StorageVolumeUtil;
import org.apache.hadoop.ozone.container.common.volume.HddsVolume;
import org.apache.hadoop.ozone.container.common.volume.MutableVolumeSet;
import org.apache.hadoop.ozone.container.common.volume.RoundRobinVolumeChoosingPolicy;
import org.apache.hadoop.ozone.container.common.volume.StorageVolume;
import org.apache.hadoop.ozone.container.common.volume.VolumeChoosingPolicyFactory;
import org.apache.hadoop.ozone.container.keyvalue.ContainerLayoutTestInfo;
import org.apache.hadoop.ozone.container.keyvalue.KeyValueContainer;
import org.apache.hadoop.ozone.container.keyvalue.KeyValueContainerData;
import org.apache.hadoop.security.token.Token;
import org.apache.ozone.test.GenericTestUtils.LogCapturer;
import org.apache.ratis.thirdparty.com.google.protobuf.ByteString;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Test-cases to verify the functionality of HddsDispatcher.
*/
public class TestHddsDispatcher {
  // Per-test scratch directory used as the parent directory for container DBs.
  @TempDir
  private Path tempDir;
  private static final Logger LOG = LoggerFactory.getLogger(
      TestHddsDispatcher.class);
  // Per-test directory used for datanode data and metadata locations.
  @TempDir
  private File testDir;
  // Shared volume-choosing policy, initialized once in init().
  private static VolumeChoosingPolicy volumeChoosingPolicy;
  // Incremental container report sender that simply discards all reports.
  public static final IncrementalReportSender<Container> NO_OP_ICR_SENDER =
      c -> {
      };
@BeforeAll
public static void init() {
volumeChoosingPolicy = VolumeChoosingPolicyFactory.getPolicy(new OzoneConfiguration());
}
  /**
   * Tests that close container action is sent when a container is full. First two containers are created. Then we
   * write to one of them to confirm normal writes are successful. Then we increase the used space of both containers
   * such that they're close to full, and write to both of them simultaneously. The expectation is that close
   * container action should be added for both of them and two immediate heartbeats should be sent. Next, we write
   * again to the first container. This time the close container action should be queued but immediate heartbeat
   * should not be sent because of throttling. This confirms that the throttling is per container.
   * @param layout container layout version under test
   * @throws IOException on container or dispatcher setup failure
   */
  @ContainerLayoutTestInfo.ContainerTest
  public void testContainerCloseActionWhenFull(
      ContainerLayoutVersion layout) throws IOException {
    String testDirPath = testDir.getPath();
    OzoneConfiguration conf = new OzoneConfiguration();
    conf.set(HDDS_DATANODE_DIR_KEY, testDirPath);
    conf.set(OzoneConfigKeys.OZONE_METADATA_DIRS, testDirPath);
    DatanodeDetails dd = randomDatanodeDetails();
    MutableVolumeSet volumeSet = new MutableVolumeSet(dd.getUuidString(), "test", conf,
        null, StorageVolume.VolumeType.DATA_VOLUME, null);
    volumeSet.getVolumesList().forEach(e -> e.setState(StorageVolume.VolumeState.NORMAL));
    volumeSet.startAllVolume();
    try {
      UUID scmId = UUID.randomUUID();
      ContainerSet containerSet = newContainerSet();
      StateContext context = ContainerTestUtils.getMockContext(dd, conf);
      // create both containers (1 GB max size each)
      KeyValueContainerData containerData = new KeyValueContainerData(1L,
          layout,
          (long) StorageUnit.GB.toBytes(1), UUID.randomUUID().toString(),
          dd.getUuidString());
      KeyValueContainerData containerData2 = new KeyValueContainerData(2L,
          layout, (long) StorageUnit.GB.toBytes(1), UUID.randomUUID().toString(), dd.getUuidString());
      Container container = new KeyValueContainer(containerData, conf);
      Container container2 = new KeyValueContainer(containerData2, conf);
      StorageVolumeUtil.getHddsVolumesList(volumeSet.getVolumesList())
          .forEach(hddsVolume -> hddsVolume.setDbParentDir(tempDir.toFile()));
      container.create(volumeSet, new RoundRobinVolumeChoosingPolicy(),
          scmId.toString());
      container2.create(volumeSet, new RoundRobinVolumeChoosingPolicy(),
          scmId.toString());
      containerSet.addContainer(container);
      containerSet.addContainer(container2);
      ContainerMetrics metrics = ContainerMetrics.create(conf);
      Map<ContainerType, Handler> handlers = Maps.newHashMap();
      for (ContainerType containerType : ContainerType.values()) {
        handlers.put(containerType, Handler.getHandlerForContainerType(containerType, conf,
            context.getParent().getDatanodeDetails().getUuidString(),
            containerSet, volumeSet, volumeChoosingPolicy, metrics, NO_OP_ICR_SENDER,
            new ContainerChecksumTreeManager(conf)));
      }
      // write successfully to first container
      HddsDispatcher hddsDispatcher = new HddsDispatcher(
          conf, containerSet, volumeSet, handlers, context, metrics, null);
      hddsDispatcher.setClusterId(scmId.toString());
      ContainerCommandResponseProto responseOne = hddsDispatcher
          .dispatch(getWriteChunkRequest(dd.getUuidString(), 1L, 1L), null);
      assertEquals(ContainerProtos.Result.SUCCESS,
          responseOne.getResult());
      // no close-container action expected while the container has free space
      verify(context, times(0))
          .addContainerActionIfAbsent(any(ContainerAction.class));
      // increment used space of both containers to 950 MB (near the 1 GB cap)
      containerData.getStatistics().setBlockBytesForTesting(Double.valueOf(
          StorageUnit.MB.toBytes(950)).longValue());
      containerData2.getStatistics().setBlockBytesForTesting(Double.valueOf(
          StorageUnit.MB.toBytes(950)).longValue());
      ContainerCommandResponseProto responseTwo = hddsDispatcher
          .dispatch(getWriteChunkRequest(dd.getUuidString(), 1L, 2L), null);
      ContainerCommandResponseProto responseThree = hddsDispatcher
          .dispatch(getWriteChunkRequest(dd.getUuidString(), 2L, 1L), null);
      assertEquals(ContainerProtos.Result.SUCCESS,
          responseTwo.getResult());
      assertEquals(ContainerProtos.Result.SUCCESS, responseThree.getResult());
      // container action should be added for both containers
      verify(context, times(2))
          .addContainerActionIfAbsent(any(ContainerAction.class));
      DatanodeStateMachine stateMachine = context.getParent();
      // immediate heartbeat should be triggered for both the containers
      verify(stateMachine, times(2)).triggerHeartbeat();
      // if we write again to container 1, the container action should get added but heartbeat should not get triggered
      // again because of throttling
      hddsDispatcher.dispatch(getWriteChunkRequest(dd.getUuidString(), 1L, 3L), null);
      verify(context, times(3)).addContainerActionIfAbsent(any(ContainerAction.class));
      verify(stateMachine, times(2)).triggerHeartbeat(); // was called twice before
    } finally {
      volumeSet.shutdown();
      ContainerMetrics.remove();
    }
  }
@Test
public void testSmallFileChecksum() throws IOException {
String testDirPath = testDir.getPath();
try {
UUID scmId = UUID.randomUUID();
OzoneConfiguration conf = new OzoneConfiguration();
conf.set(HDDS_DATANODE_DIR_KEY, testDirPath);
conf.set(OzoneConfigKeys.OZONE_METADATA_DIRS, testDirPath);
DatanodeConfiguration dnConf = conf.getObject(DatanodeConfiguration.class);
dnConf.setChunkDataValidationCheck(true);
conf.setFromObject(dnConf);
DatanodeDetails dd = randomDatanodeDetails();
HddsDispatcher hddsDispatcher = createDispatcher(dd, scmId, conf);
ContainerCommandResponseProto smallFileResponse =
hddsDispatcher.dispatch(newPutSmallFile(1L, 1L), null);
assertEquals(ContainerProtos.Result.SUCCESS, smallFileResponse.getResult());
} finally {
ContainerMetrics.remove();
}
}
@Test
public void testWriteChunkChecksum() throws IOException {
String testDirPath = testDir.getPath();
try {
UUID scmId = UUID.randomUUID();
OzoneConfiguration conf = new OzoneConfiguration();
conf.set(HDDS_DATANODE_DIR_KEY, testDirPath);
conf.set(OzoneConfigKeys.OZONE_METADATA_DIRS, testDirPath);
DatanodeConfiguration dnConf = conf.getObject(DatanodeConfiguration.class);
dnConf.setChunkDataValidationCheck(true);
conf.setFromObject(dnConf);
DatanodeDetails dd = randomDatanodeDetails();
HddsDispatcher hddsDispatcher = createDispatcher(dd, scmId, conf);
//Send a few WriteChunkRequests
ContainerCommandResponseProto response;
ContainerCommandRequestProto writeChunkRequest0 = getWriteChunkRequest0(dd.getUuidString(), 1L, 1L, 0);
hddsDispatcher.dispatch(writeChunkRequest0, null);
hddsDispatcher.dispatch(getWriteChunkRequest0(dd.getUuidString(), 1L, 1L, 1), null);
response = hddsDispatcher.dispatch(getWriteChunkRequest0(dd.getUuidString(), 1L, 1L, 2), null);
assertEquals(ContainerProtos.Result.SUCCESS, response.getResult());
// Send Read Chunk request for written chunk.
response =
hddsDispatcher.dispatch(getReadChunkRequest(writeChunkRequest0), null);
assertEquals(ContainerProtos.Result.SUCCESS, response.getResult());
ByteString responseData = BufferUtils.concatByteStrings(
response.getReadChunk().getDataBuffers().getBuffersList());
assertEquals(writeChunkRequest0.getWriteChunk().getData(),
responseData);
// Test checksum on Read:
final DispatcherContext context = DispatcherContext
.newBuilder(DispatcherContext.Op.READ_STATE_MACHINE_DATA)
.build();
response =
hddsDispatcher.dispatch(getReadChunkRequest(writeChunkRequest0), context);
assertEquals(ContainerProtos.Result.SUCCESS, response.getResult());
} finally {
ContainerMetrics.remove();
}
}
  /**
   * Verifies that once a volume crosses its min-free-space threshold a write
   * fails with DISK_OUT_OF_SPACE, a close-container action plus an immediate
   * heartbeat are issued (heartbeat throttled on repeat), and creating a new
   * container on the full volume is rejected.
   */
  @ContainerLayoutTestInfo.ContainerTest
  public void testContainerCloseActionWhenVolumeFull(
      ContainerLayoutVersion layoutVersion) throws Exception {
    String testDirPath = testDir.getPath();
    OzoneConfiguration conf = new OzoneConfiguration();
    conf.setStorageSize(DatanodeConfiguration.HDDS_DATANODE_VOLUME_MIN_FREE_SPACE,
        100.0, StorageUnit.BYTES);
    DatanodeDetails dd = randomDatanodeDetails();
    HddsVolume.Builder volumeBuilder =
        new HddsVolume.Builder(testDirPath).datanodeUuid(dd.getUuidString())
            .conf(conf).usageCheckFactory(MockSpaceUsageCheckFactory.NONE).clusterID("test");
    // state of cluster : available (160) > 100 ,datanode volume
    // utilisation threshold not yet reached. container creates are successful.
    AtomicLong usedSpace = new AtomicLong(340);
    SpaceUsageSource spaceUsage = MockSpaceUsageSource.of(500, usedSpace);
    SpaceUsageCheckFactory factory = MockSpaceUsageCheckFactory.of(
        spaceUsage, Duration.ZERO, inMemory(new AtomicLong(0)));
    volumeBuilder.usageCheckFactory(factory);
    // Mocked volume set always returns the single mock-usage volume above.
    MutableVolumeSet volumeSet = mock(MutableVolumeSet.class);
    when(volumeSet.getVolumesList())
        .thenReturn(Collections.singletonList(volumeBuilder.build()));
    volumeSet.getVolumesList().get(0).setState(StorageVolume.VolumeState.NORMAL);
    volumeSet.getVolumesList().get(0).start();
    try {
      UUID scmId = UUID.randomUUID();
      ContainerSet containerSet = newContainerSet();
      StateContext context = ContainerTestUtils.getMockContext(dd, conf);
      DatanodeStateMachine stateMachine = context.getParent();
      // create a 50 byte container
      // available (160) > 100 (min free space) + 50 (container size)
      KeyValueContainerData containerData = new KeyValueContainerData(1L,
          layoutVersion,
          50, UUID.randomUUID().toString(),
          dd.getUuidString());
      Container container = new KeyValueContainer(containerData, conf);
      StorageVolumeUtil.getHddsVolumesList(volumeSet.getVolumesList())
          .forEach(hddsVolume -> hddsVolume.setDbParentDir(tempDir.toFile()));
      container.create(volumeSet, new RoundRobinVolumeChoosingPolicy(),
          scmId.toString());
      containerSet.addContainer(container);
      ContainerMetrics metrics = ContainerMetrics.create(conf);
      Map<ContainerType, Handler> handlers = Maps.newHashMap();
      for (ContainerType containerType : ContainerType.values()) {
        handlers.put(containerType,
            Handler.getHandlerForContainerType(containerType, conf,
                context.getParent().getDatanodeDetails().getUuidString(),
                containerSet, volumeSet, volumeChoosingPolicy, metrics, NO_OP_ICR_SENDER,
                new ContainerChecksumTreeManager(conf)));
      }
      HddsDispatcher hddsDispatcher = new HddsDispatcher(
          conf, containerSet, volumeSet, handlers, context, metrics, null);
      hddsDispatcher.setClusterId(scmId.toString());
      // consume 60 more bytes so available (100) == min free space; the next
      // write must be refused for lack of space
      containerData.getVolume().getVolumeUsage()
          .ifPresent(usage -> usage.incrementUsedSpace(60));
      usedSpace.addAndGet(60);
      ContainerCommandResponseProto response = hddsDispatcher
          .dispatch(getWriteChunkRequest(dd.getUuidString(), 1L, 1L), null);
      assertEquals(ContainerProtos.Result.DISK_OUT_OF_SPACE, response.getResult());
      verify(context, times(1))
          .addContainerActionIfAbsent(any(ContainerAction.class));
      // verify that immediate heartbeat is triggered
      verify(stateMachine, times(1)).triggerHeartbeat();
      // the volume has reached the min free space boundary but this time the heartbeat should not be triggered because
      // of throttling
      hddsDispatcher.dispatch(getWriteChunkRequest(dd.getUuidString(), 1L, 2L), null);
      verify(context, times(2)).addContainerActionIfAbsent(any(ContainerAction.class));
      verify(stateMachine, times(1)).triggerHeartbeat(); // was called once before
      // try creating another container now as the volume used has crossed
      // threshold
      KeyValueContainerData containerData2 = new KeyValueContainerData(1L,
          layoutVersion,
          50, UUID.randomUUID().toString(),
          dd.getUuidString());
      Container container2 = new KeyValueContainer(containerData2, conf);
      StorageContainerException scException =
          assertThrows(StorageContainerException.class,
              () -> container2.create(volumeSet,
                  new RoundRobinVolumeChoosingPolicy(), scmId.toString()));
      assertEquals("Container creation failed, due to disk out of space",
          scException.getMessage());
    } finally {
      volumeSet.shutdown();
      ContainerMetrics.remove();
    }
  }
@Test
public void testCreateContainerWithWriteChunk() throws IOException {
String testDirPath = testDir.getPath();
try {
UUID scmId = UUID.randomUUID();
OzoneConfiguration conf = new OzoneConfiguration();
conf.set(HDDS_DATANODE_DIR_KEY, testDirPath);
conf.set(OzoneConfigKeys.OZONE_METADATA_DIRS, testDirPath);
DatanodeDetails dd = randomDatanodeDetails();
HddsDispatcher hddsDispatcher = createDispatcher(dd, scmId, conf);
ContainerCommandRequestProto writeChunkRequest =
getWriteChunkRequest(dd.getUuidString(), 1L, 1L);
// send read chunk request and make sure container does not exist
ContainerCommandResponseProto response =
hddsDispatcher.dispatch(getReadChunkRequest(writeChunkRequest), null);
assertEquals(response.getResult(),
ContainerProtos.Result.CONTAINER_NOT_FOUND);
// send write chunk request without sending create container
response = hddsDispatcher.dispatch(writeChunkRequest, null);
// container should be created as part of write chunk request
assertEquals(ContainerProtos.Result.SUCCESS, response.getResult());
// send read chunk request to read the chunk written above
response =
hddsDispatcher.dispatch(getReadChunkRequest(writeChunkRequest), null);
assertEquals(ContainerProtos.Result.SUCCESS, response.getResult());
ByteString responseData = BufferUtils.concatByteStrings(
response.getReadChunk().getDataBuffers().getBuffersList());
assertEquals(writeChunkRequest.getWriteChunk().getData(),
responseData);
// put block
ContainerCommandRequestProto putBlockRequest =
ContainerTestHelper.getPutBlockRequest(writeChunkRequest);
response = hddsDispatcher.dispatch(putBlockRequest, null);
assertEquals(ContainerProtos.Result.SUCCESS, response.getResult());
// send list block request
ContainerCommandRequestProto listBlockRequest =
ContainerTestHelper.getListBlockRequest(writeChunkRequest);
response = hddsDispatcher.dispatch(listBlockRequest, null);
assertEquals(ContainerProtos.Result.SUCCESS, response.getResult());
assertEquals(1, response.getListBlock().getBlockDataList().size());
for (ContainerProtos.BlockData blockData :
response.getListBlock().getBlockDataList()) {
assertEquals(writeChunkRequest.getWriteChunk().getBlockID(),
blockData.getBlockID());
assertEquals(writeChunkRequest.getWriteChunk().getChunkData()
.getLen(), blockData.getSize());
assertEquals(1, blockData.getChunksCount());
}
} finally {
ContainerMetrics.remove();
}
}
@Test
public void testContainerNotFoundWithCommitChunk() throws IOException {
String testDirPath = testDir.getPath();
try {
UUID scmId = UUID.randomUUID();
OzoneConfiguration conf = new OzoneConfiguration();
conf.set(HDDS_DATANODE_DIR_KEY, testDirPath);
conf.set(OzoneConfigKeys.OZONE_METADATA_DIRS, testDirPath);
DatanodeDetails dd = randomDatanodeDetails();
HddsDispatcher hddsDispatcher = createDispatcher(dd, scmId, conf);
ContainerCommandRequestProto writeChunkRequest =
getWriteChunkRequest(dd.getUuidString(), 1L, 1L);
// send read chunk request and make sure container does not exist
ContainerCommandResponseProto response =
hddsDispatcher.dispatch(getReadChunkRequest(writeChunkRequest), null);
assertEquals(
ContainerProtos.Result.CONTAINER_NOT_FOUND, response.getResult());
LogCapturer logCapturer = LogCapturer.captureLogs(HddsDispatcher.class);
// send write chunk request without sending create container
response = hddsDispatcher.dispatch(writeChunkRequest, COMMIT_STAGE);
// container should not be found
assertEquals(
ContainerProtos.Result.CONTAINER_NOT_FOUND, response.getResult());
assertThat(logCapturer.getOutput()).contains(
"ContainerID " + writeChunkRequest.getContainerID()
+ " does not exist");
} finally {
ContainerMetrics.remove();
}
}
@Test
public void testWriteChunkWithCreateContainerFailure() throws IOException {
String testDirPath = testDir.getPath();
try {
UUID scmId = UUID.randomUUID();
OzoneConfiguration conf = new OzoneConfiguration();
conf.set(HDDS_DATANODE_DIR_KEY, testDirPath);
conf.set(OzoneConfigKeys.OZONE_METADATA_DIRS, testDirPath);
DatanodeDetails dd = randomDatanodeDetails();
HddsDispatcher hddsDispatcher = createDispatcher(dd, scmId, conf);
ContainerCommandRequestProto writeChunkRequest = getWriteChunkRequest(
dd.getUuidString(), 1L, 1L);
HddsDispatcher mockDispatcher = spy(hddsDispatcher);
ContainerCommandResponseProto.Builder builder =
getContainerCommandResponse(writeChunkRequest,
ContainerProtos.Result.DISK_OUT_OF_SPACE, "");
// Return DISK_OUT_OF_SPACE response when writing chunk
// with container creation.
doReturn(builder.build()).when(mockDispatcher)
.createContainer(writeChunkRequest);
LogCapturer logCapturer = LogCapturer.captureLogs(HddsDispatcher.class);
// send write chunk request without sending create container
mockDispatcher.dispatch(writeChunkRequest, null);
// verify the error log
assertThat(logCapturer.getOutput())
.contains("ContainerID " + writeChunkRequest.getContainerID()
+ " creation failed , Result: DISK_OUT_OF_SPACE");
} finally {
ContainerMetrics.remove();
}
}
  /**
   * Dispatches the same WriteChunk request and the same PutBlock request three
   * times each; the duplicates must be idempotent (all SUCCESS), the chunk must
   * read back intact, and the block must end up with exactly one chunk.
   */
  @Test
  public void testDuplicateWriteChunkAndPutBlockRequest() throws IOException {
    String testDirPath = testDir.getPath();
    try {
      UUID scmId = UUID.randomUUID();
      OzoneConfiguration conf = new OzoneConfiguration();
      conf.set(HDDS_DATANODE_DIR_KEY, testDirPath);
      conf.set(OzoneConfigKeys.OZONE_METADATA_DIRS, testDirPath);
      DatanodeDetails dd = randomDatanodeDetails();
      HddsDispatcher hddsDispatcher = createDispatcher(dd, scmId, conf);
      ContainerCommandRequestProto writeChunkRequest = getWriteChunkRequest(
          dd.getUuidString(), 1L, 1L);
      //Send same WriteChunkRequest three times; duplicates must also succeed
      ContainerCommandResponseProto response;
      hddsDispatcher.dispatch(writeChunkRequest, null);
      hddsDispatcher.dispatch(writeChunkRequest, null);
      response = hddsDispatcher.dispatch(writeChunkRequest, null);
      assertEquals(ContainerProtos.Result.SUCCESS, response.getResult());
      // Send Read Chunk request for written chunk.
      response =
          hddsDispatcher.dispatch(getReadChunkRequest(writeChunkRequest), null);
      assertEquals(ContainerProtos.Result.SUCCESS, response.getResult());
      ByteString responseData = BufferUtils.concatByteStrings(
          response.getReadChunk().getDataBuffers().getBuffersList());
      assertEquals(writeChunkRequest.getWriteChunk().getData(),
          responseData);
      // Put Block
      ContainerCommandRequestProto putBlockRequest =
          ContainerTestHelper.getPutBlockRequest(writeChunkRequest);
      //Send same PutBlockRequest three times; duplicates must also succeed
      hddsDispatcher.dispatch(putBlockRequest, null);
      hddsDispatcher.dispatch(putBlockRequest, null);
      response = hddsDispatcher.dispatch(putBlockRequest, null);
      assertEquals(ContainerProtos.Result.SUCCESS, response.getResult());
      // Check PutBlock Data: duplicate requests must not add extra chunks
      ContainerCommandRequestProto listBlockRequest =
          ContainerTestHelper.getListBlockRequest(writeChunkRequest);
      response = hddsDispatcher.dispatch(listBlockRequest, null);
      assertEquals(ContainerProtos.Result.SUCCESS, response.getResult());
      assertEquals(1, response.getListBlock().getBlockDataList().size());
      for (ContainerProtos.BlockData blockData :
          response.getListBlock().getBlockDataList()) {
        assertEquals(writeChunkRequest.getWriteChunk().getBlockID(),
            blockData.getBlockID());
        assertEquals(writeChunkRequest.getWriteChunk().getChunkData()
            .getLen(), blockData.getSize());
        assertEquals(1, blockData.getChunksCount());
      }
    } finally {
      ContainerMetrics.remove();
    }
  }
  /**
   * Creates an {@link HddsDispatcher} instance without token verification.
   * Delegates to the four-argument overload with a null verifier.
   * @param dd datanode detail info.
   * @param scmId UUID of scm id.
   * @param conf configuration be used.
   * @return HddsDispatcher HddsDispatcher instance.
   * @throws IOException if volume or dispatcher setup fails.
   */
  static HddsDispatcher createDispatcher(DatanodeDetails dd, UUID scmId,
      OzoneConfiguration conf) throws IOException {
    return createDispatcher(dd, scmId, conf, null);
  }
static HddsDispatcher createDispatcher(DatanodeDetails dd, UUID scmId,
OzoneConfiguration conf, TokenVerifier tokenVerifier) throws IOException {
ContainerSet containerSet = newContainerSet();
MutableVolumeSet volumeSet = new MutableVolumeSet(dd.getUuidString(), conf, null,
StorageVolume.VolumeType.DATA_VOLUME, null);
volumeSet.getVolumesList().stream().forEach(v -> {
try {
v.format(scmId.toString());
v.createWorkingDir(scmId.toString(), null);
} catch (IOException e) {
throw new RuntimeException(e);
}
});
volumeSet.startAllVolume();
StateContext context = ContainerTestUtils.getMockContext(dd, conf);
ContainerMetrics metrics = ContainerMetrics.create(conf);
Map<ContainerType, Handler> handlers = Maps.newHashMap();
for (ContainerType containerType : ContainerType.values()) {
handlers.put(containerType,
Handler.getHandlerForContainerType(containerType, conf,
context.getParent().getDatanodeDetails().getUuidString(),
containerSet, volumeSet, volumeChoosingPolicy, metrics, NO_OP_ICR_SENDER,
new ContainerChecksumTreeManager(conf)));
}
final HddsDispatcher hddsDispatcher = new HddsDispatcher(conf,
containerSet, volumeSet, handlers, context, metrics, tokenVerifier);
hddsDispatcher.setClusterId(scmId.toString());
return hddsDispatcher;
}
// This method has to be removed once we move scm/TestUtils.java
// from server-scm project to container-service or to common project.
private static DatanodeDetails randomDatanodeDetails() {
DatanodeDetails.Port containerPort = DatanodeDetails.newStandalonePort(0);
DatanodeDetails.Port ratisPort = DatanodeDetails.newRatisPort(0);
DatanodeDetails.Port restPort = DatanodeDetails.newRestPort(0);
DatanodeDetails.Builder builder = DatanodeDetails.newBuilder();
builder.setUuid(UUID.randomUUID())
.setHostName("localhost")
.setIpAddress("127.0.0.1")
.addPort(containerPort)
.addPort(ratisPort)
.addPort(restPort);
return builder.build();
}
private ContainerCommandRequestProto getWriteChunkRequest(
String datanodeId, Long containerId, Long localId) {
ByteString data = ByteString.copyFrom(
UUID.randomUUID().toString().getBytes(UTF_8));
ContainerProtos.ChunkInfo chunk = ContainerProtos.ChunkInfo
.newBuilder()
.setChunkName(
DigestUtils.md5Hex("dummy-key") + "_stream_"
+ containerId + "_chunk_" + localId)
.setOffset(0)
.setLen(data.size())
.setChecksumData(Checksum.getNoChecksumDataProto())
.build();
WriteChunkRequestProto.Builder writeChunkRequest = WriteChunkRequestProto
.newBuilder()
.setBlockID(new BlockID(containerId, localId)
.getDatanodeBlockIDProtobuf())
.setChunkData(chunk)
.setData(data);
return ContainerCommandRequestProto
.newBuilder()
.setContainerID(containerId)
.setCmdType(ContainerProtos.Type.WriteChunk)
.setDatanodeUuid(datanodeId)
.setWriteChunk(writeChunkRequest)
.build();
}
static ChecksumData checksum(ByteString data) {
try {
return new Checksum(ContainerProtos.ChecksumType.CRC32, 256)
.computeChecksum(data.asReadOnlyByteBuffer());
} catch (OzoneChecksumException e) {
throw new IllegalStateException(e);
}
}
private ContainerCommandRequestProto getWriteChunkRequest0(
String datanodeId, Long containerId, Long localId, int chunkNum) {
final int lenOfBytes = 32;
ByteString chunkData = ByteString.copyFrom(RandomUtils.secure().randomBytes(32));
ContainerProtos.ChunkInfo chunk = ContainerProtos.ChunkInfo
.newBuilder()
.setChunkName(
DigestUtils.md5Hex("dummy-key") + "_stream_"
+ containerId + "_chunk_" + localId)
.setOffset((long) chunkNum * lenOfBytes)
.setLen(lenOfBytes)
.setChecksumData(checksum(chunkData).getProtoBufMessage())
.build();
WriteChunkRequestProto.Builder writeChunkRequest = WriteChunkRequestProto
.newBuilder()
.setBlockID(new BlockID(containerId, localId)
.getDatanodeBlockIDProtobuf())
.setChunkData(chunk)
.setData(chunkData);
return ContainerCommandRequestProto
.newBuilder()
.setContainerID(containerId)
.setCmdType(ContainerProtos.Type.WriteChunk)
.setDatanodeUuid(datanodeId)
.setWriteChunk(writeChunkRequest)
.build();
}
static ContainerCommandRequestProto newPutSmallFile(Long containerId, Long localId) {
ByteString chunkData = ByteString.copyFrom(RandomUtils.secure().randomBytes(32));
return newPutSmallFile(new BlockID(containerId, localId), chunkData);
}
static ContainerCommandRequestProto newPutSmallFile(
BlockID blockID, ByteString data) {
final ContainerProtos.BlockData.Builder blockData
= ContainerProtos.BlockData.newBuilder()
.setBlockID(blockID.getDatanodeBlockIDProtobuf());
final ContainerProtos.PutBlockRequestProto.Builder putBlockRequest
= ContainerProtos.PutBlockRequestProto.newBuilder()
.setBlockData(blockData);
final ContainerProtos.KeyValue keyValue = ContainerProtos.KeyValue.newBuilder()
.setKey("OverWriteRequested")
.setValue("true")
.build();
final ContainerProtos.ChunkInfo chunk = ContainerProtos.ChunkInfo.newBuilder()
.setChunkName(blockID.getLocalID() + "_chunk")
.setOffset(0)
.setLen(data.size())
.addMetadata(keyValue)
.setChecksumData(checksum(data).getProtoBufMessage())
.build();
final ContainerProtos.PutSmallFileRequestProto putSmallFileRequest
= ContainerProtos.PutSmallFileRequestProto.newBuilder()
.setChunkInfo(chunk)
.setBlock(putBlockRequest)
.setData(data)
.build();
return ContainerCommandRequestProto.newBuilder()
.setCmdType(ContainerProtos.Type.PutSmallFile)
.setContainerID(blockID.getContainerID())
.setDatanodeUuid(UUID.randomUUID().toString())
.setPutSmallFile(putSmallFileRequest)
.build();
}
/**
* Creates container read chunk request using input container write chunk
* request.
*
* @param writeChunkRequest - Input container write chunk request
* @return container read chunk request
*/
private ContainerCommandRequestProto getReadChunkRequest(
ContainerCommandRequestProto writeChunkRequest) {
WriteChunkRequestProto writeChunk = writeChunkRequest.getWriteChunk();
ContainerProtos.ReadChunkRequestProto.Builder readChunkRequest =
ContainerProtos.ReadChunkRequestProto.newBuilder()
.setBlockID(writeChunk.getBlockID())
.setChunkData(writeChunk.getChunkData())
.setReadChunkVersion(ContainerProtos.ReadChunkVersion.V1);
return ContainerCommandRequestProto.newBuilder()
.setCmdType(ContainerProtos.Type.ReadChunk)
.setContainerID(writeChunk.getBlockID().getContainerID())
.setTraceID(writeChunkRequest.getTraceID())
.setDatanodeUuid(writeChunkRequest.getDatanodeUuid())
.setReadChunk(readChunkRequest)
.build();
}
  /**
   * Verifies that the dispatcher invokes the {@link TokenVerifier} exactly
   * once per dispatch for client-facing ops, and never for server-internal
   * (Ratis state-machine / apply / stream-link) contexts.
   */
  @Test
  public void testValidateToken() throws Exception {
    try {
      final OzoneConfiguration conf = new OzoneConfiguration();
      conf.set(HDDS_DATANODE_DIR_KEY, testDir.getPath());
      conf.set(OzoneConfigKeys.OZONE_METADATA_DIRS, testDir.getPath());
      final DatanodeDetails dd = randomDatanodeDetails();
      final UUID scmId = UUID.randomUUID();
      // Flips to true when the verifier runs; checked and reset around each
      // dispatch so every call is asserted to verify at most once.
      final AtomicBoolean verified = new AtomicBoolean();
      final TokenVerifier tokenVerifier = new TokenVerifier() {
        private void verify() {
          // Fails if the verifier fires twice for a single dispatch.
          final boolean previous = verified.getAndSet(true);
          assertFalse(previous);
        }
        @Override
        public void verify(ContainerCommandRequestProtoOrBuilder cmd,
            String encodedToken) {
          verify();
        }
        @Override
        public void verify(Token<?> token,
            ContainerCommandRequestProtoOrBuilder cmd) {
          verify();
        }
      };
      final ContainerCommandRequestProto request = getWriteChunkRequest(
          dd.getUuidString(), 1L, 1L);
      final HddsDispatcher dispatcher = createDispatcher(
          dd, scmId, conf, tokenVerifier);
      // Server-internal contexts: token verification must be skipped entirely.
      final DispatcherContext[] notVerify = {
          newContext(Op.WRITE_STATE_MACHINE_DATA, WriteChunkStage.WRITE_DATA),
          newContext(Op.READ_STATE_MACHINE_DATA),
          newContext(Op.APPLY_TRANSACTION),
          newContext(Op.STREAM_LINK, WriteChunkStage.COMMIT_DATA)
      };
      for (DispatcherContext context : notVerify) {
        LOG.info("notVerify {}", context);
        assertFalse(verified.get());
        dispatcher.dispatch(request, context);
        assertFalse(verified.get());
      }
      // Client-facing contexts: the token must be verified exactly once each.
      final Op[] verify = {
          Op.NULL,
          Op.HANDLE_GET_SMALL_FILE,
          Op.HANDLE_PUT_SMALL_FILE,
          Op.HANDLE_READ_CHUNK,
          Op.HANDLE_WRITE_CHUNK,
          Op.STREAM_INIT,
      };
      for (Op op : verify) {
        final DispatcherContext context = newContext(op);
        assertFalse(verified.get());
        dispatcher.dispatch(request, context);
        // getAndSet(false) both asserts verification happened and resets the
        // flag for the next op.
        assertTrue(verified.getAndSet(false));
      }
    } finally {
      // Unregister the global metrics created by createDispatcher so later
      // tests can create them again.
      ContainerMetrics.remove();
    }
  }
  /** Shorthand for {@link #newContext(Op, WriteChunkStage)} with the COMBINED stage. */
  static DispatcherContext newContext(Op op) {
    return newContext(op, WriteChunkStage.COMBINED);
  }
static DispatcherContext newContext(Op op, WriteChunkStage stage) {
return DispatcherContext.newBuilder(op)
.setTerm(1)
.setLogIndex(1)
.setStage(stage)
.setContainer2BCSIDMap(new HashMap<>())
.build();
}
}
|
googleapis/google-cloud-java | 38,071 | java-storageinsights/google-cloud-storageinsights/src/main/java/com/google/cloud/storageinsights/v1/stub/GrpcStorageInsightsStub.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.storageinsights.v1.stub;
import static com.google.cloud.storageinsights.v1.StorageInsightsClient.ListDatasetConfigsPagedResponse;
import static com.google.cloud.storageinsights.v1.StorageInsightsClient.ListLocationsPagedResponse;
import static com.google.cloud.storageinsights.v1.StorageInsightsClient.ListReportConfigsPagedResponse;
import static com.google.cloud.storageinsights.v1.StorageInsightsClient.ListReportDetailsPagedResponse;
import com.google.api.gax.core.BackgroundResource;
import com.google.api.gax.core.BackgroundResourceAggregation;
import com.google.api.gax.grpc.GrpcCallSettings;
import com.google.api.gax.grpc.GrpcStubCallableFactory;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.OperationCallable;
import com.google.api.gax.rpc.RequestParamsBuilder;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.cloud.location.GetLocationRequest;
import com.google.cloud.location.ListLocationsRequest;
import com.google.cloud.location.ListLocationsResponse;
import com.google.cloud.location.Location;
import com.google.cloud.storageinsights.v1.CreateDatasetConfigRequest;
import com.google.cloud.storageinsights.v1.CreateReportConfigRequest;
import com.google.cloud.storageinsights.v1.DatasetConfig;
import com.google.cloud.storageinsights.v1.DeleteDatasetConfigRequest;
import com.google.cloud.storageinsights.v1.DeleteReportConfigRequest;
import com.google.cloud.storageinsights.v1.GetDatasetConfigRequest;
import com.google.cloud.storageinsights.v1.GetReportConfigRequest;
import com.google.cloud.storageinsights.v1.GetReportDetailRequest;
import com.google.cloud.storageinsights.v1.LinkDatasetRequest;
import com.google.cloud.storageinsights.v1.LinkDatasetResponse;
import com.google.cloud.storageinsights.v1.ListDatasetConfigsRequest;
import com.google.cloud.storageinsights.v1.ListDatasetConfigsResponse;
import com.google.cloud.storageinsights.v1.ListReportConfigsRequest;
import com.google.cloud.storageinsights.v1.ListReportConfigsResponse;
import com.google.cloud.storageinsights.v1.ListReportDetailsRequest;
import com.google.cloud.storageinsights.v1.ListReportDetailsResponse;
import com.google.cloud.storageinsights.v1.OperationMetadata;
import com.google.cloud.storageinsights.v1.ReportConfig;
import com.google.cloud.storageinsights.v1.ReportDetail;
import com.google.cloud.storageinsights.v1.UnlinkDatasetRequest;
import com.google.cloud.storageinsights.v1.UpdateDatasetConfigRequest;
import com.google.cloud.storageinsights.v1.UpdateReportConfigRequest;
import com.google.longrunning.Operation;
import com.google.longrunning.stub.GrpcOperationsStub;
import com.google.protobuf.Empty;
import io.grpc.MethodDescriptor;
import io.grpc.protobuf.ProtoUtils;
import java.io.IOException;
import java.util.concurrent.TimeUnit;
import javax.annotation.Generated;
// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
* gRPC stub implementation for the StorageInsights service API.
*
* <p>This class is for advanced usage and reflects the underlying API directly.
*/
@Generated("by gapic-generator-java")
public class GrpcStorageInsightsStub extends StorageInsightsStub {
  // Static gRPC method descriptors: each binds a fully-qualified RPC name to
  // protobuf marshallers for its request/response types. All RPCs here are
  // unary. Generated code — do not hand-edit the wiring.
  private static final MethodDescriptor<ListReportConfigsRequest, ListReportConfigsResponse>
      listReportConfigsMethodDescriptor =
          MethodDescriptor.<ListReportConfigsRequest, ListReportConfigsResponse>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName(
                  "google.cloud.storageinsights.v1.StorageInsights/ListReportConfigs")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(ListReportConfigsRequest.getDefaultInstance()))
              .setResponseMarshaller(
                  ProtoUtils.marshaller(ListReportConfigsResponse.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();
  private static final MethodDescriptor<GetReportConfigRequest, ReportConfig>
      getReportConfigMethodDescriptor =
          MethodDescriptor.<GetReportConfigRequest, ReportConfig>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.cloud.storageinsights.v1.StorageInsights/GetReportConfig")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(GetReportConfigRequest.getDefaultInstance()))
              .setResponseMarshaller(ProtoUtils.marshaller(ReportConfig.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();
  private static final MethodDescriptor<CreateReportConfigRequest, ReportConfig>
      createReportConfigMethodDescriptor =
          MethodDescriptor.<CreateReportConfigRequest, ReportConfig>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName(
                  "google.cloud.storageinsights.v1.StorageInsights/CreateReportConfig")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(CreateReportConfigRequest.getDefaultInstance()))
              .setResponseMarshaller(ProtoUtils.marshaller(ReportConfig.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();
  private static final MethodDescriptor<UpdateReportConfigRequest, ReportConfig>
      updateReportConfigMethodDescriptor =
          MethodDescriptor.<UpdateReportConfigRequest, ReportConfig>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName(
                  "google.cloud.storageinsights.v1.StorageInsights/UpdateReportConfig")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(UpdateReportConfigRequest.getDefaultInstance()))
              .setResponseMarshaller(ProtoUtils.marshaller(ReportConfig.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();
  private static final MethodDescriptor<DeleteReportConfigRequest, Empty>
      deleteReportConfigMethodDescriptor =
          MethodDescriptor.<DeleteReportConfigRequest, Empty>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName(
                  "google.cloud.storageinsights.v1.StorageInsights/DeleteReportConfig")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(DeleteReportConfigRequest.getDefaultInstance()))
              .setResponseMarshaller(ProtoUtils.marshaller(Empty.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();
  private static final MethodDescriptor<ListReportDetailsRequest, ListReportDetailsResponse>
      listReportDetailsMethodDescriptor =
          MethodDescriptor.<ListReportDetailsRequest, ListReportDetailsResponse>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName(
                  "google.cloud.storageinsights.v1.StorageInsights/ListReportDetails")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(ListReportDetailsRequest.getDefaultInstance()))
              .setResponseMarshaller(
                  ProtoUtils.marshaller(ListReportDetailsResponse.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();
  private static final MethodDescriptor<GetReportDetailRequest, ReportDetail>
      getReportDetailMethodDescriptor =
          MethodDescriptor.<GetReportDetailRequest, ReportDetail>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.cloud.storageinsights.v1.StorageInsights/GetReportDetail")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(GetReportDetailRequest.getDefaultInstance()))
              .setResponseMarshaller(ProtoUtils.marshaller(ReportDetail.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();
  private static final MethodDescriptor<ListDatasetConfigsRequest, ListDatasetConfigsResponse>
      listDatasetConfigsMethodDescriptor =
          MethodDescriptor.<ListDatasetConfigsRequest, ListDatasetConfigsResponse>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName(
                  "google.cloud.storageinsights.v1.StorageInsights/ListDatasetConfigs")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(ListDatasetConfigsRequest.getDefaultInstance()))
              .setResponseMarshaller(
                  ProtoUtils.marshaller(ListDatasetConfigsResponse.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();
  private static final MethodDescriptor<GetDatasetConfigRequest, DatasetConfig>
      getDatasetConfigMethodDescriptor =
          MethodDescriptor.<GetDatasetConfigRequest, DatasetConfig>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.cloud.storageinsights.v1.StorageInsights/GetDatasetConfig")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(GetDatasetConfigRequest.getDefaultInstance()))
              .setResponseMarshaller(ProtoUtils.marshaller(DatasetConfig.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();
  // The five descriptors below return google.longrunning.Operation; their
  // results are unwrapped by the *OperationCallable wrappers in the ctor.
  private static final MethodDescriptor<CreateDatasetConfigRequest, Operation>
      createDatasetConfigMethodDescriptor =
          MethodDescriptor.<CreateDatasetConfigRequest, Operation>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName(
                  "google.cloud.storageinsights.v1.StorageInsights/CreateDatasetConfig")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(CreateDatasetConfigRequest.getDefaultInstance()))
              .setResponseMarshaller(ProtoUtils.marshaller(Operation.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();
  private static final MethodDescriptor<UpdateDatasetConfigRequest, Operation>
      updateDatasetConfigMethodDescriptor =
          MethodDescriptor.<UpdateDatasetConfigRequest, Operation>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName(
                  "google.cloud.storageinsights.v1.StorageInsights/UpdateDatasetConfig")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(UpdateDatasetConfigRequest.getDefaultInstance()))
              .setResponseMarshaller(ProtoUtils.marshaller(Operation.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();
  private static final MethodDescriptor<DeleteDatasetConfigRequest, Operation>
      deleteDatasetConfigMethodDescriptor =
          MethodDescriptor.<DeleteDatasetConfigRequest, Operation>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName(
                  "google.cloud.storageinsights.v1.StorageInsights/DeleteDatasetConfig")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(DeleteDatasetConfigRequest.getDefaultInstance()))
              .setResponseMarshaller(ProtoUtils.marshaller(Operation.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();
  private static final MethodDescriptor<LinkDatasetRequest, Operation> linkDatasetMethodDescriptor =
      MethodDescriptor.<LinkDatasetRequest, Operation>newBuilder()
          .setType(MethodDescriptor.MethodType.UNARY)
          .setFullMethodName("google.cloud.storageinsights.v1.StorageInsights/LinkDataset")
          .setRequestMarshaller(ProtoUtils.marshaller(LinkDatasetRequest.getDefaultInstance()))
          .setResponseMarshaller(ProtoUtils.marshaller(Operation.getDefaultInstance()))
          .setSampledToLocalTracing(true)
          .build();
  private static final MethodDescriptor<UnlinkDatasetRequest, Operation>
      unlinkDatasetMethodDescriptor =
          MethodDescriptor.<UnlinkDatasetRequest, Operation>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.cloud.storageinsights.v1.StorageInsights/UnlinkDataset")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(UnlinkDatasetRequest.getDefaultInstance()))
              .setResponseMarshaller(ProtoUtils.marshaller(Operation.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();
  // Mixed-in google.cloud.location.Locations service descriptors.
  private static final MethodDescriptor<ListLocationsRequest, ListLocationsResponse>
      listLocationsMethodDescriptor =
          MethodDescriptor.<ListLocationsRequest, ListLocationsResponse>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.cloud.location.Locations/ListLocations")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(ListLocationsRequest.getDefaultInstance()))
              .setResponseMarshaller(
                  ProtoUtils.marshaller(ListLocationsResponse.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();
  private static final MethodDescriptor<GetLocationRequest, Location> getLocationMethodDescriptor =
      MethodDescriptor.<GetLocationRequest, Location>newBuilder()
          .setType(MethodDescriptor.MethodType.UNARY)
          .setFullMethodName("google.cloud.location.Locations/GetLocation")
          .setRequestMarshaller(ProtoUtils.marshaller(GetLocationRequest.getDefaultInstance()))
          .setResponseMarshaller(ProtoUtils.marshaller(Location.getDefaultInstance()))
          .setSampledToLocalTracing(true)
          .build();
  // Per-RPC callables, created once in the constructor. Paged variants wrap
  // the same transport settings as their unary counterparts; Operation
  // callables add long-running-operation polling via operationsStub.
  private final UnaryCallable<ListReportConfigsRequest, ListReportConfigsResponse>
      listReportConfigsCallable;
  private final UnaryCallable<ListReportConfigsRequest, ListReportConfigsPagedResponse>
      listReportConfigsPagedCallable;
  private final UnaryCallable<GetReportConfigRequest, ReportConfig> getReportConfigCallable;
  private final UnaryCallable<CreateReportConfigRequest, ReportConfig> createReportConfigCallable;
  private final UnaryCallable<UpdateReportConfigRequest, ReportConfig> updateReportConfigCallable;
  private final UnaryCallable<DeleteReportConfigRequest, Empty> deleteReportConfigCallable;
  private final UnaryCallable<ListReportDetailsRequest, ListReportDetailsResponse>
      listReportDetailsCallable;
  private final UnaryCallable<ListReportDetailsRequest, ListReportDetailsPagedResponse>
      listReportDetailsPagedCallable;
  private final UnaryCallable<GetReportDetailRequest, ReportDetail> getReportDetailCallable;
  private final UnaryCallable<ListDatasetConfigsRequest, ListDatasetConfigsResponse>
      listDatasetConfigsCallable;
  private final UnaryCallable<ListDatasetConfigsRequest, ListDatasetConfigsPagedResponse>
      listDatasetConfigsPagedCallable;
  private final UnaryCallable<GetDatasetConfigRequest, DatasetConfig> getDatasetConfigCallable;
  private final UnaryCallable<CreateDatasetConfigRequest, Operation> createDatasetConfigCallable;
  private final OperationCallable<CreateDatasetConfigRequest, DatasetConfig, OperationMetadata>
      createDatasetConfigOperationCallable;
  private final UnaryCallable<UpdateDatasetConfigRequest, Operation> updateDatasetConfigCallable;
  private final OperationCallable<UpdateDatasetConfigRequest, DatasetConfig, OperationMetadata>
      updateDatasetConfigOperationCallable;
  private final UnaryCallable<DeleteDatasetConfigRequest, Operation> deleteDatasetConfigCallable;
  private final OperationCallable<DeleteDatasetConfigRequest, Empty, OperationMetadata>
      deleteDatasetConfigOperationCallable;
  private final UnaryCallable<LinkDatasetRequest, Operation> linkDatasetCallable;
  private final OperationCallable<LinkDatasetRequest, LinkDatasetResponse, OperationMetadata>
      linkDatasetOperationCallable;
  private final UnaryCallable<UnlinkDatasetRequest, Operation> unlinkDatasetCallable;
  private final OperationCallable<UnlinkDatasetRequest, Empty, OperationMetadata>
      unlinkDatasetOperationCallable;
  private final UnaryCallable<ListLocationsRequest, ListLocationsResponse> listLocationsCallable;
  private final UnaryCallable<ListLocationsRequest, ListLocationsPagedResponse>
      listLocationsPagedCallable;
  private final UnaryCallable<GetLocationRequest, Location> getLocationCallable;
  private final BackgroundResource backgroundResources;
  private final GrpcOperationsStub operationsStub;
  private final GrpcStubCallableFactory callableFactory;
  /** Creates a stub from the given settings, with a ClientContext derived from them. */
  public static final GrpcStorageInsightsStub create(StorageInsightsStubSettings settings)
      throws IOException {
    return new GrpcStorageInsightsStub(settings, ClientContext.create(settings));
  }
  /** Creates a stub with default settings, using the given ClientContext. */
  public static final GrpcStorageInsightsStub create(ClientContext clientContext)
      throws IOException {
    return new GrpcStorageInsightsStub(
        StorageInsightsStubSettings.newBuilder().build(), clientContext);
  }
  /** Creates a stub with default settings, the given ClientContext and callable factory. */
  public static final GrpcStorageInsightsStub create(
      ClientContext clientContext, GrpcStubCallableFactory callableFactory) throws IOException {
    return new GrpcStorageInsightsStub(
        StorageInsightsStubSettings.newBuilder().build(), clientContext, callableFactory);
  }
  /**
   * Constructs an instance of GrpcStorageInsightsStub, using the given settings. This is protected
   * so that it is easy to make a subclass, but otherwise, the static factory methods should be
   * preferred.
   */
  protected GrpcStorageInsightsStub(
      StorageInsightsStubSettings settings, ClientContext clientContext) throws IOException {
    // Delegates to the full constructor with the default gRPC callable factory.
    this(settings, clientContext, new GrpcStorageInsightsCallableFactory());
  }
  /**
   * Constructs an instance of GrpcStorageInsightsStub, using the given settings. This is protected
   * so that it is easy to make a subclass, but otherwise, the static factory methods should be
   * preferred.
   */
  protected GrpcStorageInsightsStub(
      StorageInsightsStubSettings settings,
      ClientContext clientContext,
      GrpcStubCallableFactory callableFactory)
      throws IOException {
    this.callableFactory = callableFactory;
    // Operations stub services long-running-operation polling for the
    // *OperationCallable wrappers created below.
    this.operationsStub = GrpcOperationsStub.create(clientContext, callableFactory);
    // Transport settings: bind each static method descriptor and extract the
    // routing header ("parent"/"name"/resource-name field) from each request.
    GrpcCallSettings<ListReportConfigsRequest, ListReportConfigsResponse>
        listReportConfigsTransportSettings =
            GrpcCallSettings.<ListReportConfigsRequest, ListReportConfigsResponse>newBuilder()
                .setMethodDescriptor(listReportConfigsMethodDescriptor)
                .setParamsExtractor(
                    request -> {
                      RequestParamsBuilder builder = RequestParamsBuilder.create();
                      builder.add("parent", String.valueOf(request.getParent()));
                      return builder.build();
                    })
                .build();
    GrpcCallSettings<GetReportConfigRequest, ReportConfig> getReportConfigTransportSettings =
        GrpcCallSettings.<GetReportConfigRequest, ReportConfig>newBuilder()
            .setMethodDescriptor(getReportConfigMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("name", String.valueOf(request.getName()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<CreateReportConfigRequest, ReportConfig> createReportConfigTransportSettings =
        GrpcCallSettings.<CreateReportConfigRequest, ReportConfig>newBuilder()
            .setMethodDescriptor(createReportConfigMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("parent", String.valueOf(request.getParent()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<UpdateReportConfigRequest, ReportConfig> updateReportConfigTransportSettings =
        GrpcCallSettings.<UpdateReportConfigRequest, ReportConfig>newBuilder()
            .setMethodDescriptor(updateReportConfigMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add(
                      "report_config.name", String.valueOf(request.getReportConfig().getName()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<DeleteReportConfigRequest, Empty> deleteReportConfigTransportSettings =
        GrpcCallSettings.<DeleteReportConfigRequest, Empty>newBuilder()
            .setMethodDescriptor(deleteReportConfigMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("name", String.valueOf(request.getName()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<ListReportDetailsRequest, ListReportDetailsResponse>
        listReportDetailsTransportSettings =
            GrpcCallSettings.<ListReportDetailsRequest, ListReportDetailsResponse>newBuilder()
                .setMethodDescriptor(listReportDetailsMethodDescriptor)
                .setParamsExtractor(
                    request -> {
                      RequestParamsBuilder builder = RequestParamsBuilder.create();
                      builder.add("parent", String.valueOf(request.getParent()));
                      return builder.build();
                    })
                .build();
    GrpcCallSettings<GetReportDetailRequest, ReportDetail> getReportDetailTransportSettings =
        GrpcCallSettings.<GetReportDetailRequest, ReportDetail>newBuilder()
            .setMethodDescriptor(getReportDetailMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("name", String.valueOf(request.getName()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<ListDatasetConfigsRequest, ListDatasetConfigsResponse>
        listDatasetConfigsTransportSettings =
            GrpcCallSettings.<ListDatasetConfigsRequest, ListDatasetConfigsResponse>newBuilder()
                .setMethodDescriptor(listDatasetConfigsMethodDescriptor)
                .setParamsExtractor(
                    request -> {
                      RequestParamsBuilder builder = RequestParamsBuilder.create();
                      builder.add("parent", String.valueOf(request.getParent()));
                      return builder.build();
                    })
                .build();
    GrpcCallSettings<GetDatasetConfigRequest, DatasetConfig> getDatasetConfigTransportSettings =
        GrpcCallSettings.<GetDatasetConfigRequest, DatasetConfig>newBuilder()
            .setMethodDescriptor(getDatasetConfigMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("name", String.valueOf(request.getName()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<CreateDatasetConfigRequest, Operation> createDatasetConfigTransportSettings =
        GrpcCallSettings.<CreateDatasetConfigRequest, Operation>newBuilder()
            .setMethodDescriptor(createDatasetConfigMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("parent", String.valueOf(request.getParent()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<UpdateDatasetConfigRequest, Operation> updateDatasetConfigTransportSettings =
        GrpcCallSettings.<UpdateDatasetConfigRequest, Operation>newBuilder()
            .setMethodDescriptor(updateDatasetConfigMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add(
                      "dataset_config.name", String.valueOf(request.getDatasetConfig().getName()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<DeleteDatasetConfigRequest, Operation> deleteDatasetConfigTransportSettings =
        GrpcCallSettings.<DeleteDatasetConfigRequest, Operation>newBuilder()
            .setMethodDescriptor(deleteDatasetConfigMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("name", String.valueOf(request.getName()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<LinkDatasetRequest, Operation> linkDatasetTransportSettings =
        GrpcCallSettings.<LinkDatasetRequest, Operation>newBuilder()
            .setMethodDescriptor(linkDatasetMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("name", String.valueOf(request.getName()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<UnlinkDatasetRequest, Operation> unlinkDatasetTransportSettings =
        GrpcCallSettings.<UnlinkDatasetRequest, Operation>newBuilder()
            .setMethodDescriptor(unlinkDatasetMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("name", String.valueOf(request.getName()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<ListLocationsRequest, ListLocationsResponse> listLocationsTransportSettings =
        GrpcCallSettings.<ListLocationsRequest, ListLocationsResponse>newBuilder()
            .setMethodDescriptor(listLocationsMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("name", String.valueOf(request.getName()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<GetLocationRequest, Location> getLocationTransportSettings =
        GrpcCallSettings.<GetLocationRequest, Location>newBuilder()
            .setMethodDescriptor(getLocationMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("name", String.valueOf(request.getName()));
                  return builder.build();
                })
            .build();
    // Wire the final callables: transport settings + per-method retry/timeout
    // settings from StorageInsightsStubSettings.
    this.listReportConfigsCallable =
        callableFactory.createUnaryCallable(
            listReportConfigsTransportSettings,
            settings.listReportConfigsSettings(),
            clientContext);
    this.listReportConfigsPagedCallable =
        callableFactory.createPagedCallable(
            listReportConfigsTransportSettings,
            settings.listReportConfigsSettings(),
            clientContext);
    this.getReportConfigCallable =
        callableFactory.createUnaryCallable(
            getReportConfigTransportSettings, settings.getReportConfigSettings(), clientContext);
    this.createReportConfigCallable =
        callableFactory.createUnaryCallable(
            createReportConfigTransportSettings,
            settings.createReportConfigSettings(),
            clientContext);
    this.updateReportConfigCallable =
        callableFactory.createUnaryCallable(
            updateReportConfigTransportSettings,
            settings.updateReportConfigSettings(),
            clientContext);
    this.deleteReportConfigCallable =
        callableFactory.createUnaryCallable(
            deleteReportConfigTransportSettings,
            settings.deleteReportConfigSettings(),
            clientContext);
    this.listReportDetailsCallable =
        callableFactory.createUnaryCallable(
            listReportDetailsTransportSettings,
            settings.listReportDetailsSettings(),
            clientContext);
    this.listReportDetailsPagedCallable =
        callableFactory.createPagedCallable(
            listReportDetailsTransportSettings,
            settings.listReportDetailsSettings(),
            clientContext);
    this.getReportDetailCallable =
        callableFactory.createUnaryCallable(
            getReportDetailTransportSettings, settings.getReportDetailSettings(), clientContext);
    this.listDatasetConfigsCallable =
        callableFactory.createUnaryCallable(
            listDatasetConfigsTransportSettings,
            settings.listDatasetConfigsSettings(),
            clientContext);
    this.listDatasetConfigsPagedCallable =
        callableFactory.createPagedCallable(
            listDatasetConfigsTransportSettings,
            settings.listDatasetConfigsSettings(),
            clientContext);
    this.getDatasetConfigCallable =
        callableFactory.createUnaryCallable(
            getDatasetConfigTransportSettings, settings.getDatasetConfigSettings(), clientContext);
    this.createDatasetConfigCallable =
        callableFactory.createUnaryCallable(
            createDatasetConfigTransportSettings,
            settings.createDatasetConfigSettings(),
            clientContext);
    this.createDatasetConfigOperationCallable =
        callableFactory.createOperationCallable(
            createDatasetConfigTransportSettings,
            settings.createDatasetConfigOperationSettings(),
            clientContext,
            operationsStub);
    this.updateDatasetConfigCallable =
        callableFactory.createUnaryCallable(
            updateDatasetConfigTransportSettings,
            settings.updateDatasetConfigSettings(),
            clientContext);
    this.updateDatasetConfigOperationCallable =
        callableFactory.createOperationCallable(
            updateDatasetConfigTransportSettings,
            settings.updateDatasetConfigOperationSettings(),
            clientContext,
            operationsStub);
    this.deleteDatasetConfigCallable =
        callableFactory.createUnaryCallable(
            deleteDatasetConfigTransportSettings,
            settings.deleteDatasetConfigSettings(),
            clientContext);
    this.deleteDatasetConfigOperationCallable =
        callableFactory.createOperationCallable(
            deleteDatasetConfigTransportSettings,
            settings.deleteDatasetConfigOperationSettings(),
            clientContext,
            operationsStub);
    this.linkDatasetCallable =
        callableFactory.createUnaryCallable(
            linkDatasetTransportSettings, settings.linkDatasetSettings(), clientContext);
    this.linkDatasetOperationCallable =
        callableFactory.createOperationCallable(
            linkDatasetTransportSettings,
            settings.linkDatasetOperationSettings(),
            clientContext,
            operationsStub);
    this.unlinkDatasetCallable =
        callableFactory.createUnaryCallable(
            unlinkDatasetTransportSettings, settings.unlinkDatasetSettings(), clientContext);
    this.unlinkDatasetOperationCallable =
        callableFactory.createOperationCallable(
            unlinkDatasetTransportSettings,
            settings.unlinkDatasetOperationSettings(),
            clientContext,
            operationsStub);
    this.listLocationsCallable =
        callableFactory.createUnaryCallable(
            listLocationsTransportSettings, settings.listLocationsSettings(), clientContext);
    this.listLocationsPagedCallable =
        callableFactory.createPagedCallable(
            listLocationsTransportSettings, settings.listLocationsSettings(), clientContext);
    this.getLocationCallable =
        callableFactory.createUnaryCallable(
            getLocationTransportSettings, settings.getLocationSettings(), clientContext);
    // Aggregate background resources so close()/shutdown() tears everything down.
    this.backgroundResources =
        new BackgroundResourceAggregation(clientContext.getBackgroundResources());
  }
  /** Returns the operations stub used by the *OperationCallable accessors to poll LRO status. */
  public GrpcOperationsStub getOperationsStub() {
    return operationsStub;
  }
  // Accessors for the ReportConfig RPC callables wired up in the constructor.
  @Override
  public UnaryCallable<ListReportConfigsRequest, ListReportConfigsResponse>
      listReportConfigsCallable() {
    return listReportConfigsCallable;
  }
  @Override
  public UnaryCallable<ListReportConfigsRequest, ListReportConfigsPagedResponse>
      listReportConfigsPagedCallable() {
    return listReportConfigsPagedCallable;
  }
  @Override
  public UnaryCallable<GetReportConfigRequest, ReportConfig> getReportConfigCallable() {
    return getReportConfigCallable;
  }
  @Override
  public UnaryCallable<CreateReportConfigRequest, ReportConfig> createReportConfigCallable() {
    return createReportConfigCallable;
  }
  @Override
  public UnaryCallable<UpdateReportConfigRequest, ReportConfig> updateReportConfigCallable() {
    return updateReportConfigCallable;
  }
  @Override
  public UnaryCallable<DeleteReportConfigRequest, Empty> deleteReportConfigCallable() {
    return deleteReportConfigCallable;
  }
  // Accessors for the ReportDetail RPC callables wired up in the constructor.
  @Override
  public UnaryCallable<ListReportDetailsRequest, ListReportDetailsResponse>
      listReportDetailsCallable() {
    return listReportDetailsCallable;
  }
  @Override
  public UnaryCallable<ListReportDetailsRequest, ListReportDetailsPagedResponse>
      listReportDetailsPagedCallable() {
    return listReportDetailsPagedCallable;
  }
  @Override
  public UnaryCallable<GetReportDetailRequest, ReportDetail> getReportDetailCallable() {
    return getReportDetailCallable;
  }
  // Accessors for the DatasetConfig RPC callables. Create/update/delete are long-running
  // operations, so each also exposes an OperationCallable built against operationsStub.
  @Override
  public UnaryCallable<ListDatasetConfigsRequest, ListDatasetConfigsResponse>
      listDatasetConfigsCallable() {
    return listDatasetConfigsCallable;
  }
  @Override
  public UnaryCallable<ListDatasetConfigsRequest, ListDatasetConfigsPagedResponse>
      listDatasetConfigsPagedCallable() {
    return listDatasetConfigsPagedCallable;
  }
  @Override
  public UnaryCallable<GetDatasetConfigRequest, DatasetConfig> getDatasetConfigCallable() {
    return getDatasetConfigCallable;
  }
  @Override
  public UnaryCallable<CreateDatasetConfigRequest, Operation> createDatasetConfigCallable() {
    return createDatasetConfigCallable;
  }
  @Override
  public OperationCallable<CreateDatasetConfigRequest, DatasetConfig, OperationMetadata>
      createDatasetConfigOperationCallable() {
    return createDatasetConfigOperationCallable;
  }
  @Override
  public UnaryCallable<UpdateDatasetConfigRequest, Operation> updateDatasetConfigCallable() {
    return updateDatasetConfigCallable;
  }
  @Override
  public OperationCallable<UpdateDatasetConfigRequest, DatasetConfig, OperationMetadata>
      updateDatasetConfigOperationCallable() {
    return updateDatasetConfigOperationCallable;
  }
  @Override
  public UnaryCallable<DeleteDatasetConfigRequest, Operation> deleteDatasetConfigCallable() {
    return deleteDatasetConfigCallable;
  }
  @Override
  public OperationCallable<DeleteDatasetConfigRequest, Empty, OperationMetadata>
      deleteDatasetConfigOperationCallable() {
    return deleteDatasetConfigOperationCallable;
  }
  // Accessors for the (long-running) link/unlink dataset RPC callables.
  @Override
  public UnaryCallable<LinkDatasetRequest, Operation> linkDatasetCallable() {
    return linkDatasetCallable;
  }
  @Override
  public OperationCallable<LinkDatasetRequest, LinkDatasetResponse, OperationMetadata>
      linkDatasetOperationCallable() {
    return linkDatasetOperationCallable;
  }
  @Override
  public UnaryCallable<UnlinkDatasetRequest, Operation> unlinkDatasetCallable() {
    return unlinkDatasetCallable;
  }
  @Override
  public OperationCallable<UnlinkDatasetRequest, Empty, OperationMetadata>
      unlinkDatasetOperationCallable() {
    return unlinkDatasetOperationCallable;
  }
  // Accessors for the mixed-in Locations API callables.
  @Override
  public UnaryCallable<ListLocationsRequest, ListLocationsResponse> listLocationsCallable() {
    return listLocationsCallable;
  }
  @Override
  public UnaryCallable<ListLocationsRequest, ListLocationsPagedResponse>
      listLocationsPagedCallable() {
    return listLocationsPagedCallable;
  }
  @Override
  public UnaryCallable<GetLocationRequest, Location> getLocationCallable() {
    return getLocationCallable;
  }
  /**
   * Closes the aggregated background resources held by this stub.
   *
   * <p>Runtime exceptions propagate unchanged; checked exceptions are wrapped in
   * {@link IllegalStateException} because {@code close()} declares none.
   */
  @Override
  public final void close() {
    try {
      backgroundResources.close();
    } catch (RuntimeException e) {
      throw e;
    } catch (Exception e) {
      throw new IllegalStateException("Failed to close resource", e);
    }
  }
  // Lifecycle methods: all delegate to the BackgroundResourceAggregation built in the
  // constructor from clientContext.getBackgroundResources().
  @Override
  public void shutdown() {
    backgroundResources.shutdown();
  }
  @Override
  public boolean isShutdown() {
    return backgroundResources.isShutdown();
  }
  @Override
  public boolean isTerminated() {
    return backgroundResources.isTerminated();
  }
  @Override
  public void shutdownNow() {
    backgroundResources.shutdownNow();
  }
  @Override
  public boolean awaitTermination(long duration, TimeUnit unit) throws InterruptedException {
    return backgroundResources.awaitTermination(duration, unit);
  }
}
/**
* Autogenerated by Thrift Compiler (0.16.0)
*
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
* @generated
*/
package org.apache.hadoop.hive.metastore.api;
@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked", "unused"})
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.16.0)")
@org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public class PartitionsStatsRequest implements org.apache.thrift.TBase<PartitionsStatsRequest, PartitionsStatsRequest._Fields>, java.io.Serializable, Cloneable, Comparable<PartitionsStatsRequest> {
  // Wire-level descriptors: Thrift field name, wire type, and numeric field id.
  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("PartitionsStatsRequest");
  private static final org.apache.thrift.protocol.TField DB_NAME_FIELD_DESC = new org.apache.thrift.protocol.TField("dbName", org.apache.thrift.protocol.TType.STRING, (short)1);
  private static final org.apache.thrift.protocol.TField TBL_NAME_FIELD_DESC = new org.apache.thrift.protocol.TField("tblName", org.apache.thrift.protocol.TType.STRING, (short)2);
  private static final org.apache.thrift.protocol.TField COL_NAMES_FIELD_DESC = new org.apache.thrift.protocol.TField("colNames", org.apache.thrift.protocol.TType.LIST, (short)3);
  private static final org.apache.thrift.protocol.TField PART_NAMES_FIELD_DESC = new org.apache.thrift.protocol.TField("partNames", org.apache.thrift.protocol.TType.LIST, (short)4);
  private static final org.apache.thrift.protocol.TField CAT_NAME_FIELD_DESC = new org.apache.thrift.protocol.TField("catName", org.apache.thrift.protocol.TType.STRING, (short)5);
  private static final org.apache.thrift.protocol.TField VALID_WRITE_ID_LIST_FIELD_DESC = new org.apache.thrift.protocol.TField("validWriteIdList", org.apache.thrift.protocol.TType.STRING, (short)6);
  private static final org.apache.thrift.protocol.TField ENGINE_FIELD_DESC = new org.apache.thrift.protocol.TField("engine", org.apache.thrift.protocol.TType.STRING, (short)7);
  // Factories for the two serialization schemes (field-tagged standard vs. compact tuple).
  private static final org.apache.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY = new PartitionsStatsRequestStandardSchemeFactory();
  private static final org.apache.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new PartitionsStatsRequestTupleSchemeFactory();
  // Struct members; trailing comments mark required vs. optional per the Thrift IDL.
  private @org.apache.thrift.annotation.Nullable java.lang.String dbName; // required
  private @org.apache.thrift.annotation.Nullable java.lang.String tblName; // required
  private @org.apache.thrift.annotation.Nullable java.util.List<java.lang.String> colNames; // required
  private @org.apache.thrift.annotation.Nullable java.util.List<java.lang.String> partNames; // required
  private @org.apache.thrift.annotation.Nullable java.lang.String catName; // optional
  private @org.apache.thrift.annotation.Nullable java.lang.String validWriteIdList; // optional
  private @org.apache.thrift.annotation.Nullable java.lang.String engine; // optional
  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
    DB_NAME((short)1, "dbName"),
    TBL_NAME((short)2, "tblName"),
    COL_NAMES((short)3, "colNames"),
    PART_NAMES((short)4, "partNames"),
    CAT_NAME((short)5, "catName"),
    VALID_WRITE_ID_LIST((short)6, "validWriteIdList"),
    ENGINE((short)7, "engine");
    // Maps Thrift field name -> enum constant, populated once for findByName lookups.
    private static final java.util.Map<java.lang.String, _Fields> byName = new java.util.HashMap<java.lang.String, _Fields>();
    static {
      for (_Fields field : java.util.EnumSet.allOf(_Fields.class)) {
        byName.put(field.getFieldName(), field);
      }
    }
    /**
     * Find the _Fields constant that matches fieldId, or null if its not found.
     */
    @org.apache.thrift.annotation.Nullable
    public static _Fields findByThriftId(int fieldId) {
      switch(fieldId) {
        case 1: // DB_NAME
          return DB_NAME;
        case 2: // TBL_NAME
          return TBL_NAME;
        case 3: // COL_NAMES
          return COL_NAMES;
        case 4: // PART_NAMES
          return PART_NAMES;
        case 5: // CAT_NAME
          return CAT_NAME;
        case 6: // VALID_WRITE_ID_LIST
          return VALID_WRITE_ID_LIST;
        case 7: // ENGINE
          return ENGINE;
        default:
          return null;
      }
    }
    /**
     * Find the _Fields constant that matches fieldId, throwing an exception
     * if it is not found.
     */
    public static _Fields findByThriftIdOrThrow(int fieldId) {
      _Fields fields = findByThriftId(fieldId);
      if (fields == null) throw new java.lang.IllegalArgumentException("Field " + fieldId + " doesn't exist!");
      return fields;
    }
    /**
     * Find the _Fields constant that matches name, or null if its not found.
     */
    @org.apache.thrift.annotation.Nullable
    public static _Fields findByName(java.lang.String name) {
      return byName.get(name);
    }
    private final short _thriftId;
    private final java.lang.String _fieldName;
    _Fields(short thriftId, java.lang.String fieldName) {
      _thriftId = thriftId;
      _fieldName = fieldName;
    }
    public short getThriftFieldId() {
      return _thriftId;
    }
    public java.lang.String getFieldName() {
      return _fieldName;
    }
  }
  // isset id assignments
  // Fields that are optional on the wire; consulted by the generated write paths.
  private static final _Fields optionals[] = {_Fields.CAT_NAME,_Fields.VALID_WRITE_ID_LIST,_Fields.ENGINE};
  public static final java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
  // Builds per-field metadata and registers it with the Thrift runtime for reflective access.
  static {
    java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new java.util.EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
    tmpMap.put(_Fields.DB_NAME, new org.apache.thrift.meta_data.FieldMetaData("dbName", org.apache.thrift.TFieldRequirementType.REQUIRED,
        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
    tmpMap.put(_Fields.TBL_NAME, new org.apache.thrift.meta_data.FieldMetaData("tblName", org.apache.thrift.TFieldRequirementType.REQUIRED,
        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
    tmpMap.put(_Fields.COL_NAMES, new org.apache.thrift.meta_data.FieldMetaData("colNames", org.apache.thrift.TFieldRequirementType.REQUIRED,
        new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST,
            new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))));
    tmpMap.put(_Fields.PART_NAMES, new org.apache.thrift.meta_data.FieldMetaData("partNames", org.apache.thrift.TFieldRequirementType.REQUIRED,
        new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST,
            new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))));
    tmpMap.put(_Fields.CAT_NAME, new org.apache.thrift.meta_data.FieldMetaData("catName", org.apache.thrift.TFieldRequirementType.OPTIONAL,
        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
    tmpMap.put(_Fields.VALID_WRITE_ID_LIST, new org.apache.thrift.meta_data.FieldMetaData("validWriteIdList", org.apache.thrift.TFieldRequirementType.OPTIONAL,
        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
    tmpMap.put(_Fields.ENGINE, new org.apache.thrift.meta_data.FieldMetaData("engine", org.apache.thrift.TFieldRequirementType.OPTIONAL,
        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
    metaDataMap = java.util.Collections.unmodifiableMap(tmpMap);
    org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(PartitionsStatsRequest.class, metaDataMap);
  }
  /** Default constructor; {@code engine} defaults to "hive", all other fields are unset. */
  public PartitionsStatsRequest() {
    this.engine = "hive";
  }
  /** Convenience constructor taking all required fields; chains to the default constructor first. */
  public PartitionsStatsRequest(
    java.lang.String dbName,
    java.lang.String tblName,
    java.util.List<java.lang.String> colNames,
    java.util.List<java.lang.String> partNames)
  {
    this();
    this.dbName = dbName;
    this.tblName = tblName;
    this.colNames = colNames;
    this.partNames = partNames;
  }
  /**
   * Performs a deep copy on <i>other</i>.
   * List fields are copied into fresh ArrayLists; elements (Strings) are immutable.
   */
  public PartitionsStatsRequest(PartitionsStatsRequest other) {
    if (other.isSetDbName()) {
      this.dbName = other.dbName;
    }
    if (other.isSetTblName()) {
      this.tblName = other.tblName;
    }
    if (other.isSetColNames()) {
      java.util.List<java.lang.String> __this__colNames = new java.util.ArrayList<java.lang.String>(other.colNames);
      this.colNames = __this__colNames;
    }
    if (other.isSetPartNames()) {
      java.util.List<java.lang.String> __this__partNames = new java.util.ArrayList<java.lang.String>(other.partNames);
      this.partNames = __this__partNames;
    }
    if (other.isSetCatName()) {
      this.catName = other.catName;
    }
    if (other.isSetValidWriteIdList()) {
      this.validWriteIdList = other.validWriteIdList;
    }
    if (other.isSetEngine()) {
      this.engine = other.engine;
    }
  }
  /** Returns a deep copy of this struct (TBase contract); delegates to the copy constructor. */
  public PartitionsStatsRequest deepCopy() {
    return new PartitionsStatsRequest(this);
  }
  /** Resets all fields; note {@code engine} returns to its IDL default "hive", not null-unset. */
  @Override
  public void clear() {
    this.dbName = null;
    this.tblName = null;
    this.colNames = null;
    this.partNames = null;
    this.catName = null;
    this.validWriteIdList = null;
    this.engine = "hive";
  }
  // --- Accessors for required field dbName (set-ness is tracked by null-ness) ---
  @org.apache.thrift.annotation.Nullable
  public java.lang.String getDbName() {
    return this.dbName;
  }
  public void setDbName(@org.apache.thrift.annotation.Nullable java.lang.String dbName) {
    this.dbName = dbName;
  }
  public void unsetDbName() {
    this.dbName = null;
  }
  /** Returns true if field dbName is set (has been assigned a value) and false otherwise */
  public boolean isSetDbName() {
    return this.dbName != null;
  }
  public void setDbNameIsSet(boolean value) {
    if (!value) {
      this.dbName = null;
    }
  }
  // --- Accessors for required field tblName ---
  @org.apache.thrift.annotation.Nullable
  public java.lang.String getTblName() {
    return this.tblName;
  }
  public void setTblName(@org.apache.thrift.annotation.Nullable java.lang.String tblName) {
    this.tblName = tblName;
  }
  public void unsetTblName() {
    this.tblName = null;
  }
  /** Returns true if field tblName is set (has been assigned a value) and false otherwise */
  public boolean isSetTblName() {
    return this.tblName != null;
  }
  public void setTblNameIsSet(boolean value) {
    if (!value) {
      this.tblName = null;
    }
  }
  // --- Accessors for required list field colNames; addToColNames lazily creates the list ---
  public int getColNamesSize() {
    return (this.colNames == null) ? 0 : this.colNames.size();
  }
  @org.apache.thrift.annotation.Nullable
  public java.util.Iterator<java.lang.String> getColNamesIterator() {
    return (this.colNames == null) ? null : this.colNames.iterator();
  }
  public void addToColNames(java.lang.String elem) {
    if (this.colNames == null) {
      this.colNames = new java.util.ArrayList<java.lang.String>();
    }
    this.colNames.add(elem);
  }
  @org.apache.thrift.annotation.Nullable
  public java.util.List<java.lang.String> getColNames() {
    return this.colNames;
  }
  public void setColNames(@org.apache.thrift.annotation.Nullable java.util.List<java.lang.String> colNames) {
    this.colNames = colNames;
  }
  public void unsetColNames() {
    this.colNames = null;
  }
  /** Returns true if field colNames is set (has been assigned a value) and false otherwise */
  public boolean isSetColNames() {
    return this.colNames != null;
  }
  public void setColNamesIsSet(boolean value) {
    if (!value) {
      this.colNames = null;
    }
  }
  // --- Accessors for required list field partNames; addToPartNames lazily creates the list ---
  public int getPartNamesSize() {
    return (this.partNames == null) ? 0 : this.partNames.size();
  }
  @org.apache.thrift.annotation.Nullable
  public java.util.Iterator<java.lang.String> getPartNamesIterator() {
    return (this.partNames == null) ? null : this.partNames.iterator();
  }
  public void addToPartNames(java.lang.String elem) {
    if (this.partNames == null) {
      this.partNames = new java.util.ArrayList<java.lang.String>();
    }
    this.partNames.add(elem);
  }
  @org.apache.thrift.annotation.Nullable
  public java.util.List<java.lang.String> getPartNames() {
    return this.partNames;
  }
  public void setPartNames(@org.apache.thrift.annotation.Nullable java.util.List<java.lang.String> partNames) {
    this.partNames = partNames;
  }
  public void unsetPartNames() {
    this.partNames = null;
  }
  /** Returns true if field partNames is set (has been assigned a value) and false otherwise */
  public boolean isSetPartNames() {
    return this.partNames != null;
  }
  public void setPartNamesIsSet(boolean value) {
    if (!value) {
      this.partNames = null;
    }
  }
  // --- Accessors for optional field catName ---
  @org.apache.thrift.annotation.Nullable
  public java.lang.String getCatName() {
    return this.catName;
  }
  public void setCatName(@org.apache.thrift.annotation.Nullable java.lang.String catName) {
    this.catName = catName;
  }
  public void unsetCatName() {
    this.catName = null;
  }
  /** Returns true if field catName is set (has been assigned a value) and false otherwise */
  public boolean isSetCatName() {
    return this.catName != null;
  }
  public void setCatNameIsSet(boolean value) {
    if (!value) {
      this.catName = null;
    }
  }
  // --- Accessors for optional field validWriteIdList ---
  @org.apache.thrift.annotation.Nullable
  public java.lang.String getValidWriteIdList() {
    return this.validWriteIdList;
  }
  public void setValidWriteIdList(@org.apache.thrift.annotation.Nullable java.lang.String validWriteIdList) {
    this.validWriteIdList = validWriteIdList;
  }
  public void unsetValidWriteIdList() {
    this.validWriteIdList = null;
  }
  /** Returns true if field validWriteIdList is set (has been assigned a value) and false otherwise */
  public boolean isSetValidWriteIdList() {
    return this.validWriteIdList != null;
  }
  public void setValidWriteIdListIsSet(boolean value) {
    if (!value) {
      this.validWriteIdList = null;
    }
  }
  // --- Accessors for optional field engine (constructor default "hive") ---
  @org.apache.thrift.annotation.Nullable
  public java.lang.String getEngine() {
    return this.engine;
  }
  public void setEngine(@org.apache.thrift.annotation.Nullable java.lang.String engine) {
    this.engine = engine;
  }
  public void unsetEngine() {
    this.engine = null;
  }
  /** Returns true if field engine is set (has been assigned a value) and false otherwise */
  public boolean isSetEngine() {
    return this.engine != null;
  }
  public void setEngineIsSet(boolean value) {
    if (!value) {
      this.engine = null;
    }
  }
  /** Generic setter used by the Thrift runtime; a null value unsets the corresponding field. */
  public void setFieldValue(_Fields field, @org.apache.thrift.annotation.Nullable java.lang.Object value) {
    switch (field) {
    case DB_NAME:
      if (value == null) {
        unsetDbName();
      } else {
        setDbName((java.lang.String)value);
      }
      break;
    case TBL_NAME:
      if (value == null) {
        unsetTblName();
      } else {
        setTblName((java.lang.String)value);
      }
      break;
    case COL_NAMES:
      if (value == null) {
        unsetColNames();
      } else {
        setColNames((java.util.List<java.lang.String>)value);
      }
      break;
    case PART_NAMES:
      if (value == null) {
        unsetPartNames();
      } else {
        setPartNames((java.util.List<java.lang.String>)value);
      }
      break;
    case CAT_NAME:
      if (value == null) {
        unsetCatName();
      } else {
        setCatName((java.lang.String)value);
      }
      break;
    case VALID_WRITE_ID_LIST:
      if (value == null) {
        unsetValidWriteIdList();
      } else {
        setValidWriteIdList((java.lang.String)value);
      }
      break;
    case ENGINE:
      if (value == null) {
        unsetEngine();
      } else {
        setEngine((java.lang.String)value);
      }
      break;
    }
  }
  /** Generic getter used by the Thrift runtime; throws IllegalStateException for unknown fields. */
  @org.apache.thrift.annotation.Nullable
  public java.lang.Object getFieldValue(_Fields field) {
    switch (field) {
    case DB_NAME:
      return getDbName();
    case TBL_NAME:
      return getTblName();
    case COL_NAMES:
      return getColNames();
    case PART_NAMES:
      return getPartNames();
    case CAT_NAME:
      return getCatName();
    case VALID_WRITE_ID_LIST:
      return getValidWriteIdList();
    case ENGINE:
      return getEngine();
    }
    throw new java.lang.IllegalStateException();
  }
  /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
  public boolean isSet(_Fields field) {
    // Null field is a caller error; unknown enum values fall through to IllegalStateException.
    if (field == null) {
      throw new java.lang.IllegalArgumentException();
    }
    switch (field) {
    case DB_NAME:
      return isSetDbName();
    case TBL_NAME:
      return isSetTblName();
    case COL_NAMES:
      return isSetColNames();
    case PART_NAMES:
      return isSetPartNames();
    case CAT_NAME:
      return isSetCatName();
    case VALID_WRITE_ID_LIST:
      return isSetValidWriteIdList();
    case ENGINE:
      return isSetEngine();
    }
    throw new java.lang.IllegalStateException();
  }
  /** Type-checks then delegates to the struct-typed equals overload. */
  @Override
  public boolean equals(java.lang.Object that) {
    if (that instanceof PartitionsStatsRequest)
      return this.equals((PartitionsStatsRequest)that);
    return false;
  }
  /** Field-by-field equality: each field must agree on set-ness and, when set, on value. */
  public boolean equals(PartitionsStatsRequest that) {
    if (that == null)
      return false;
    if (this == that)
      return true;
    boolean this_present_dbName = true && this.isSetDbName();
    boolean that_present_dbName = true && that.isSetDbName();
    if (this_present_dbName || that_present_dbName) {
      if (!(this_present_dbName && that_present_dbName))
        return false;
      if (!this.dbName.equals(that.dbName))
        return false;
    }
    boolean this_present_tblName = true && this.isSetTblName();
    boolean that_present_tblName = true && that.isSetTblName();
    if (this_present_tblName || that_present_tblName) {
      if (!(this_present_tblName && that_present_tblName))
        return false;
      if (!this.tblName.equals(that.tblName))
        return false;
    }
    boolean this_present_colNames = true && this.isSetColNames();
    boolean that_present_colNames = true && that.isSetColNames();
    if (this_present_colNames || that_present_colNames) {
      if (!(this_present_colNames && that_present_colNames))
        return false;
      if (!this.colNames.equals(that.colNames))
        return false;
    }
    boolean this_present_partNames = true && this.isSetPartNames();
    boolean that_present_partNames = true && that.isSetPartNames();
    if (this_present_partNames || that_present_partNames) {
      if (!(this_present_partNames && that_present_partNames))
        return false;
      if (!this.partNames.equals(that.partNames))
        return false;
    }
    boolean this_present_catName = true && this.isSetCatName();
    boolean that_present_catName = true && that.isSetCatName();
    if (this_present_catName || that_present_catName) {
      if (!(this_present_catName && that_present_catName))
        return false;
      if (!this.catName.equals(that.catName))
        return false;
    }
    boolean this_present_validWriteIdList = true && this.isSetValidWriteIdList();
    boolean that_present_validWriteIdList = true && that.isSetValidWriteIdList();
    if (this_present_validWriteIdList || that_present_validWriteIdList) {
      if (!(this_present_validWriteIdList && that_present_validWriteIdList))
        return false;
      if (!this.validWriteIdList.equals(that.validWriteIdList))
        return false;
    }
    boolean this_present_engine = true && this.isSetEngine();
    boolean that_present_engine = true && that.isSetEngine();
    if (this_present_engine || that_present_engine) {
      if (!(this_present_engine && that_present_engine))
        return false;
      if (!this.engine.equals(that.engine))
        return false;
    }
    return true;
  }
  /**
   * Hash consistent with equals: mixes each field's set-flag and, when set, its value,
   * using the Mersenne-prime constants emitted by the Thrift compiler.
   */
  @Override
  public int hashCode() {
    int hashCode = 1;
    hashCode = hashCode * 8191 + ((isSetDbName()) ? 131071 : 524287);
    if (isSetDbName())
      hashCode = hashCode * 8191 + dbName.hashCode();
    hashCode = hashCode * 8191 + ((isSetTblName()) ? 131071 : 524287);
    if (isSetTblName())
      hashCode = hashCode * 8191 + tblName.hashCode();
    hashCode = hashCode * 8191 + ((isSetColNames()) ? 131071 : 524287);
    if (isSetColNames())
      hashCode = hashCode * 8191 + colNames.hashCode();
    hashCode = hashCode * 8191 + ((isSetPartNames()) ? 131071 : 524287);
    if (isSetPartNames())
      hashCode = hashCode * 8191 + partNames.hashCode();
    hashCode = hashCode * 8191 + ((isSetCatName()) ? 131071 : 524287);
    if (isSetCatName())
      hashCode = hashCode * 8191 + catName.hashCode();
    hashCode = hashCode * 8191 + ((isSetValidWriteIdList()) ? 131071 : 524287);
    if (isSetValidWriteIdList())
      hashCode = hashCode * 8191 + validWriteIdList.hashCode();
    hashCode = hashCode * 8191 + ((isSetEngine()) ? 131071 : 524287);
    if (isSetEngine())
      hashCode = hashCode * 8191 + engine.hashCode();
    return hashCode;
  }
  /**
   * Orders structs field-by-field in Thrift field-id order; for each field the set-flag
   * is compared first (unset < set), then the value via TBaseHelper.
   */
  @Override
  public int compareTo(PartitionsStatsRequest other) {
    if (!getClass().equals(other.getClass())) {
      return getClass().getName().compareTo(other.getClass().getName());
    }
    int lastComparison = 0;
    lastComparison = java.lang.Boolean.compare(isSetDbName(), other.isSetDbName());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetDbName()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.dbName, other.dbName);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    lastComparison = java.lang.Boolean.compare(isSetTblName(), other.isSetTblName());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetTblName()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.tblName, other.tblName);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    lastComparison = java.lang.Boolean.compare(isSetColNames(), other.isSetColNames());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetColNames()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.colNames, other.colNames);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    lastComparison = java.lang.Boolean.compare(isSetPartNames(), other.isSetPartNames());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetPartNames()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.partNames, other.partNames);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    lastComparison = java.lang.Boolean.compare(isSetCatName(), other.isSetCatName());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetCatName()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.catName, other.catName);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    lastComparison = java.lang.Boolean.compare(isSetValidWriteIdList(), other.isSetValidWriteIdList());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetValidWriteIdList()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.validWriteIdList, other.validWriteIdList);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    lastComparison = java.lang.Boolean.compare(isSetEngine(), other.isSetEngine());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetEngine()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.engine, other.engine);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    return 0;
  }
  /** Looks up the _Fields constant for a numeric Thrift field id, or null if unknown. */
  @org.apache.thrift.annotation.Nullable
  public _Fields fieldForId(int fieldId) {
    return _Fields.findByThriftId(fieldId);
  }
  // (De)serialization entry points; scheme(...) selects the standard or tuple scheme
  // for the given protocol (scheme selector is generated elsewhere in this class).
  public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
    scheme(iprot).read(iprot, this);
  }
  public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
    scheme(oprot).write(oprot, this);
  }
  /** Human-readable rendering; optional fields (catName, validWriteIdList, engine) appear only when set. */
  @Override
  public java.lang.String toString() {
    java.lang.StringBuilder sb = new java.lang.StringBuilder("PartitionsStatsRequest(");
    boolean first = true;
    sb.append("dbName:");
    if (this.dbName == null) {
      sb.append("null");
    } else {
      sb.append(this.dbName);
    }
    first = false;
    if (!first) sb.append(", ");
    sb.append("tblName:");
    if (this.tblName == null) {
      sb.append("null");
    } else {
      sb.append(this.tblName);
    }
    first = false;
    if (!first) sb.append(", ");
    sb.append("colNames:");
    if (this.colNames == null) {
      sb.append("null");
    } else {
      sb.append(this.colNames);
    }
    first = false;
    if (!first) sb.append(", ");
    sb.append("partNames:");
    if (this.partNames == null) {
      sb.append("null");
    } else {
      sb.append(this.partNames);
    }
    first = false;
    if (isSetCatName()) {
      if (!first) sb.append(", ");
      sb.append("catName:");
      if (this.catName == null) {
        sb.append("null");
      } else {
        sb.append(this.catName);
      }
      first = false;
    }
    if (isSetValidWriteIdList()) {
      if (!first) sb.append(", ");
      sb.append("validWriteIdList:");
      if (this.validWriteIdList == null) {
        sb.append("null");
      } else {
        sb.append(this.validWriteIdList);
      }
      first = false;
    }
    if (isSetEngine()) {
      if (!first) sb.append(", ");
      sb.append("engine:");
      if (this.engine == null) {
        sb.append("null");
      } else {
        sb.append(this.engine);
      }
      first = false;
    }
    sb.append(")");
    return sb.toString();
  }
  /** Throws TProtocolException if any required field (dbName, tblName, colNames, partNames) is unset. */
  public void validate() throws org.apache.thrift.TException {
    // check for required fields
    if (!isSetDbName()) {
      throw new org.apache.thrift.protocol.TProtocolException("Required field 'dbName' is unset! Struct:" + toString());
    }
    if (!isSetTblName()) {
      throw new org.apache.thrift.protocol.TProtocolException("Required field 'tblName' is unset! Struct:" + toString());
    }
    if (!isSetColNames()) {
      throw new org.apache.thrift.protocol.TProtocolException("Required field 'colNames' is unset! Struct:" + toString());
    }
    if (!isSetPartNames()) {
      throw new org.apache.thrift.protocol.TProtocolException("Required field 'partNames' is unset! Struct:" + toString());
    }
    // check for sub-struct validity
  }
  // Java serialization hooks: delegate to Thrift compact-protocol encoding so the
  // serialized form matches the wire format; TException is rewrapped as IOException.
  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
    try {
      write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }
  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, java.lang.ClassNotFoundException {
    try {
      read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }
  /** Factory returning the field-tagged standard scheme implementation. */
  private static class PartitionsStatsRequestStandardSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
    public PartitionsStatsRequestStandardScheme getScheme() {
      return new PartitionsStatsRequestStandardScheme();
    }
  }
  /**
   * Standard (field-tagged) scheme: reads fields in any order by id, skipping
   * unknown or wrongly-typed fields; writes required fields always and optional
   * fields only when set.
   */
  private static class PartitionsStatsRequestStandardScheme extends org.apache.thrift.scheme.StandardScheme<PartitionsStatsRequest> {
    public void read(org.apache.thrift.protocol.TProtocol iprot, PartitionsStatsRequest struct) throws org.apache.thrift.TException {
      org.apache.thrift.protocol.TField schemeField;
      iprot.readStructBegin();
      while (true)
      {
        schemeField = iprot.readFieldBegin();
        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { 
          break;
        }
        switch (schemeField.id) {
          case 1: // DB_NAME
            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
              struct.dbName = iprot.readString();
              struct.setDbNameIsSet(true);
            } else { 
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          case 2: // TBL_NAME
            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
              struct.tblName = iprot.readString();
              struct.setTblNameIsSet(true);
            } else { 
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          case 3: // COL_NAMES
            if (schemeField.type == org.apache.thrift.protocol.TType.LIST) {
              {
                org.apache.thrift.protocol.TList _list626 = iprot.readListBegin();
                struct.colNames = new java.util.ArrayList<java.lang.String>(_list626.size);
                @org.apache.thrift.annotation.Nullable java.lang.String _elem627;
                for (int _i628 = 0; _i628 < _list626.size; ++_i628)
                {
                  _elem627 = iprot.readString();
                  struct.colNames.add(_elem627);
                }
                iprot.readListEnd();
              }
              struct.setColNamesIsSet(true);
            } else { 
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          case 4: // PART_NAMES
            if (schemeField.type == org.apache.thrift.protocol.TType.LIST) {
              {
                org.apache.thrift.protocol.TList _list629 = iprot.readListBegin();
                struct.partNames = new java.util.ArrayList<java.lang.String>(_list629.size);
                @org.apache.thrift.annotation.Nullable java.lang.String _elem630;
                for (int _i631 = 0; _i631 < _list629.size; ++_i631)
                {
                  _elem630 = iprot.readString();
                  struct.partNames.add(_elem630);
                }
                iprot.readListEnd();
              }
              struct.setPartNamesIsSet(true);
            } else { 
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          case 5: // CAT_NAME
            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
              struct.catName = iprot.readString();
              struct.setCatNameIsSet(true);
            } else { 
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          case 6: // VALID_WRITE_ID_LIST
            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
              struct.validWriteIdList = iprot.readString();
              struct.setValidWriteIdListIsSet(true);
            } else { 
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          case 7: // ENGINE
            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
              struct.engine = iprot.readString();
              struct.setEngineIsSet(true);
            } else { 
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          default:
            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
        }
        iprot.readFieldEnd();
      }
      iprot.readStructEnd();
      struct.validate();
    }
    public void write(org.apache.thrift.protocol.TProtocol oprot, PartitionsStatsRequest struct) throws org.apache.thrift.TException {
      struct.validate();
      oprot.writeStructBegin(STRUCT_DESC);
      if (struct.dbName != null) {
        oprot.writeFieldBegin(DB_NAME_FIELD_DESC);
        oprot.writeString(struct.dbName);
        oprot.writeFieldEnd();
      }
      if (struct.tblName != null) {
        oprot.writeFieldBegin(TBL_NAME_FIELD_DESC);
        oprot.writeString(struct.tblName);
        oprot.writeFieldEnd();
      }
      if (struct.colNames != null) {
        oprot.writeFieldBegin(COL_NAMES_FIELD_DESC);
        {
          oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRING, struct.colNames.size()));
          for (java.lang.String _iter632 : struct.colNames)
          {
            oprot.writeString(_iter632);
          }
          oprot.writeListEnd();
        }
        oprot.writeFieldEnd();
      }
      if (struct.partNames != null) {
        oprot.writeFieldBegin(PART_NAMES_FIELD_DESC);
        {
          oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRING, struct.partNames.size()));
          for (java.lang.String _iter633 : struct.partNames)
          {
            oprot.writeString(_iter633);
          }
          oprot.writeListEnd();
        }
        oprot.writeFieldEnd();
      }
      if (struct.catName != null) {
        if (struct.isSetCatName()) {
          oprot.writeFieldBegin(CAT_NAME_FIELD_DESC);
          oprot.writeString(struct.catName);
          oprot.writeFieldEnd();
        }
      }
      if (struct.validWriteIdList != null) {
        if (struct.isSetValidWriteIdList()) {
          oprot.writeFieldBegin(VALID_WRITE_ID_LIST_FIELD_DESC);
          oprot.writeString(struct.validWriteIdList);
          oprot.writeFieldEnd();
        }
      }
      if (struct.engine != null) {
        if (struct.isSetEngine()) {
          oprot.writeFieldBegin(ENGINE_FIELD_DESC);
          oprot.writeString(struct.engine);
          oprot.writeFieldEnd();
        }
      }
      oprot.writeFieldStop();
      oprot.writeStructEnd();
    }
  }
  /** Factory handed to the Thrift scheme cache; supplies the tuple-protocol scheme. */
  private static class PartitionsStatsRequestTupleSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
    public PartitionsStatsRequestTupleScheme getScheme() {
      return new PartitionsStatsRequestTupleScheme();
    }
  }
  /**
   * Compact tuple-protocol scheme for {@code PartitionsStatsRequest}.
   *
   * <p>The required fields (dbName, tblName, colNames, partNames) are written
   * unconditionally in that exact order; the three optional fields are gated by
   * a 3-bit presence bitset (bit 0 = catName, bit 1 = validWriteIdList,
   * bit 2 = engine). {@code write} and {@code read} must stay in lock-step:
   * changing the field order or bit positions breaks wire compatibility.
   */
  private static class PartitionsStatsRequestTupleScheme extends org.apache.thrift.scheme.TupleScheme<PartitionsStatsRequest> {
    @Override
    public void write(org.apache.thrift.protocol.TProtocol prot, PartitionsStatsRequest struct) throws org.apache.thrift.TException {
      org.apache.thrift.protocol.TTupleProtocol oprot = (org.apache.thrift.protocol.TTupleProtocol) prot;
      // Required scalars first.
      oprot.writeString(struct.dbName);
      oprot.writeString(struct.tblName);
      // Required lists: a length prefix followed by the elements.
      {
        oprot.writeI32(struct.colNames.size());
        for (java.lang.String _iter634 : struct.colNames)
        {
          oprot.writeString(_iter634);
        }
      }
      {
        oprot.writeI32(struct.partNames.size());
        for (java.lang.String _iter635 : struct.partNames)
        {
          oprot.writeString(_iter635);
        }
      }
      // Presence bitset for the optional fields, then each present value in bit order.
      java.util.BitSet optionals = new java.util.BitSet();
      if (struct.isSetCatName()) {
        optionals.set(0);
      }
      if (struct.isSetValidWriteIdList()) {
        optionals.set(1);
      }
      if (struct.isSetEngine()) {
        optionals.set(2);
      }
      oprot.writeBitSet(optionals, 3);
      if (struct.isSetCatName()) {
        oprot.writeString(struct.catName);
      }
      if (struct.isSetValidWriteIdList()) {
        oprot.writeString(struct.validWriteIdList);
      }
      if (struct.isSetEngine()) {
        oprot.writeString(struct.engine);
      }
    }
    @Override
    public void read(org.apache.thrift.protocol.TProtocol prot, PartitionsStatsRequest struct) throws org.apache.thrift.TException {
      org.apache.thrift.protocol.TTupleProtocol iprot = (org.apache.thrift.protocol.TTupleProtocol) prot;
      // Required scalars, mirroring write().
      struct.dbName = iprot.readString();
      struct.setDbNameIsSet(true);
      struct.tblName = iprot.readString();
      struct.setTblNameIsSet(true);
      // Required lists: read the length prefix, then that many strings.
      {
        org.apache.thrift.protocol.TList _list636 = iprot.readListBegin(org.apache.thrift.protocol.TType.STRING);
        struct.colNames = new java.util.ArrayList<java.lang.String>(_list636.size);
        @org.apache.thrift.annotation.Nullable java.lang.String _elem637;
        for (int _i638 = 0; _i638 < _list636.size; ++_i638)
        {
          _elem637 = iprot.readString();
          struct.colNames.add(_elem637);
        }
      }
      struct.setColNamesIsSet(true);
      {
        org.apache.thrift.protocol.TList _list639 = iprot.readListBegin(org.apache.thrift.protocol.TType.STRING);
        struct.partNames = new java.util.ArrayList<java.lang.String>(_list639.size);
        @org.apache.thrift.annotation.Nullable java.lang.String _elem640;
        for (int _i641 = 0; _i641 < _list639.size; ++_i641)
        {
          _elem640 = iprot.readString();
          struct.partNames.add(_elem640);
        }
      }
      struct.setPartNamesIsSet(true);
      // Optional fields: read the 3-bit presence set, then each present value.
      java.util.BitSet incoming = iprot.readBitSet(3);
      if (incoming.get(0)) {
        struct.catName = iprot.readString();
        struct.setCatNameIsSet(true);
      }
      if (incoming.get(1)) {
        struct.validWriteIdList = iprot.readString();
        struct.setValidWriteIdListIsSet(true);
      }
      if (incoming.get(2)) {
        struct.engine = iprot.readString();
        struct.setEngineIsSet(true);
      }
    }
  }
private static <S extends org.apache.thrift.scheme.IScheme> S scheme(org.apache.thrift.protocol.TProtocol proto) {
return (org.apache.thrift.scheme.StandardScheme.class.equals(proto.getScheme()) ? STANDARD_SCHEME_FACTORY : TUPLE_SCHEME_FACTORY).getScheme();
}
}
|
googleapis/google-cloud-java | 37,611 | java-alloydb/proto-google-cloud-alloydb-v1/src/main/java/com/google/cloud/alloydb/v1/ListBackupsRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/alloydb/v1/service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.alloydb.v1;
/**
*
*
* <pre>
* Message for requesting list of Backups
* </pre>
*
* Protobuf type {@code google.cloud.alloydb.v1.ListBackupsRequest}
*/
public final class ListBackupsRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.alloydb.v1.ListBackupsRequest)
ListBackupsRequestOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use ListBackupsRequest.newBuilder() to construct.
  private ListBackupsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // No-arg constructor for the default instance: every string field starts at
  // its proto3 default value "".
  private ListBackupsRequest() {
    parent_ = "";
    pageToken_ = "";
    filter_ = "";
    orderBy_ = "";
  }
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    // Called reflectively by the protobuf runtime; the parameter only
    // disambiguates this overload and is ignored.
    return new ListBackupsRequest();
  }
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.alloydb.v1.ServiceProto
.internal_static_google_cloud_alloydb_v1_ListBackupsRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.alloydb.v1.ServiceProto
.internal_static_google_cloud_alloydb_v1_ListBackupsRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.alloydb.v1.ListBackupsRequest.class,
com.google.cloud.alloydb.v1.ListBackupsRequest.Builder.class);
}
public static final int PARENT_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. Parent value for ListBackupsRequest
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
@java.lang.Override
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. Parent value for ListBackupsRequest
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int PAGE_SIZE_FIELD_NUMBER = 2;
private int pageSize_ = 0;
/**
*
*
* <pre>
* Requested page size. Server may return fewer items than requested.
* If unspecified, server will pick an appropriate default.
* </pre>
*
* <code>int32 page_size = 2;</code>
*
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
public static final int PAGE_TOKEN_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private volatile java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* A token identifying a page of results the server should return.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return The pageToken.
*/
@java.lang.Override
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* A token identifying a page of results the server should return.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return The bytes for pageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int FILTER_FIELD_NUMBER = 4;
@SuppressWarnings("serial")
private volatile java.lang.Object filter_ = "";
/**
*
*
* <pre>
* Filtering results
* </pre>
*
* <code>string filter = 4;</code>
*
* @return The filter.
*/
@java.lang.Override
public java.lang.String getFilter() {
java.lang.Object ref = filter_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
filter_ = s;
return s;
}
}
/**
*
*
* <pre>
* Filtering results
* </pre>
*
* <code>string filter = 4;</code>
*
* @return The bytes for filter.
*/
@java.lang.Override
public com.google.protobuf.ByteString getFilterBytes() {
java.lang.Object ref = filter_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
filter_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int ORDER_BY_FIELD_NUMBER = 5;
@SuppressWarnings("serial")
private volatile java.lang.Object orderBy_ = "";
/**
*
*
* <pre>
* Hint for how to order the results
* </pre>
*
* <code>string order_by = 5;</code>
*
* @return The orderBy.
*/
@java.lang.Override
public java.lang.String getOrderBy() {
java.lang.Object ref = orderBy_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
orderBy_ = s;
return s;
}
}
/**
*
*
* <pre>
* Hint for how to order the results
* </pre>
*
* <code>string order_by = 5;</code>
*
* @return The bytes for orderBy.
*/
@java.lang.Override
public com.google.protobuf.ByteString getOrderByBytes() {
java.lang.Object ref = orderBy_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
orderBy_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Proto3 serialization: fields at their default value ("" / 0) are skipped
    // entirely; field numbers 1-5 are written in ascending order.
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
    }
    if (pageSize_ != 0) {
      output.writeInt32(2, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 4, filter_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(orderBy_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 5, orderBy_);
    }
    // Round-trip any fields parsed from a newer schema that we do not know about.
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // Lazily computed and memoized; -1 marks "not yet computed". The per-field
    // conditions must mirror writeTo() exactly so the size matches the output.
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
    }
    if (pageSize_ != 0) {
      size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, filter_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(orderBy_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, orderBy_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.alloydb.v1.ListBackupsRequest)) {
return super.equals(obj);
}
com.google.cloud.alloydb.v1.ListBackupsRequest other =
(com.google.cloud.alloydb.v1.ListBackupsRequest) obj;
if (!getParent().equals(other.getParent())) return false;
if (getPageSize() != other.getPageSize()) return false;
if (!getPageToken().equals(other.getPageToken())) return false;
if (!getFilter().equals(other.getFilter())) return false;
if (!getOrderBy().equals(other.getOrderBy())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
  @java.lang.Override
  public int hashCode() {
    // Memoized; 0 means "not yet computed". The mixing constants (41/19/37/53/29)
    // follow the protobuf code generator's convention and must stay in sync with
    // equals(): equal messages must produce equal hashes.
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
    hash = (53 * hash) + getPageSize();
    hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getPageToken().hashCode();
    hash = (37 * hash) + FILTER_FIELD_NUMBER;
    hash = (53 * hash) + getFilter().hashCode();
    hash = (37 * hash) + ORDER_BY_FIELD_NUMBER;
    hash = (53 * hash) + getOrderBy().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
public static com.google.cloud.alloydb.v1.ListBackupsRequest parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.alloydb.v1.ListBackupsRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.alloydb.v1.ListBackupsRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.alloydb.v1.ListBackupsRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.alloydb.v1.ListBackupsRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.alloydb.v1.ListBackupsRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.alloydb.v1.ListBackupsRequest parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.alloydb.v1.ListBackupsRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.alloydb.v1.ListBackupsRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.alloydb.v1.ListBackupsRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.alloydb.v1.ListBackupsRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.alloydb.v1.ListBackupsRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  /** Returns a fresh builder with every field at its default value. */
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  /** Returns a builder pre-populated with {@code prototype}'s fields. */
  public static Builder newBuilder(com.google.cloud.alloydb.v1.ListBackupsRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    // Skip the merge when this is the shared default instance (nothing to copy).
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Message for requesting list of Backups
* </pre>
*
* Protobuf type {@code google.cloud.alloydb.v1.ListBackupsRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.alloydb.v1.ListBackupsRequest)
com.google.cloud.alloydb.v1.ListBackupsRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.alloydb.v1.ServiceProto
.internal_static_google_cloud_alloydb_v1_ListBackupsRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.alloydb.v1.ServiceProto
.internal_static_google_cloud_alloydb_v1_ListBackupsRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.alloydb.v1.ListBackupsRequest.class,
com.google.cloud.alloydb.v1.ListBackupsRequest.Builder.class);
}
// Construct using com.google.cloud.alloydb.v1.ListBackupsRequest.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
    @java.lang.Override
    public Builder clear() {
      super.clear();
      // Reset all presence bits and restore every field to its proto3 default.
      bitField0_ = 0;
      parent_ = "";
      pageSize_ = 0;
      pageToken_ = "";
      filter_ = "";
      orderBy_ = "";
      return this;
    }
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.alloydb.v1.ServiceProto
.internal_static_google_cloud_alloydb_v1_ListBackupsRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.alloydb.v1.ListBackupsRequest getDefaultInstanceForType() {
return com.google.cloud.alloydb.v1.ListBackupsRequest.getDefaultInstance();
}
    @java.lang.Override
    public com.google.cloud.alloydb.v1.ListBackupsRequest build() {
      com.google.cloud.alloydb.v1.ListBackupsRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.alloydb.v1.ListBackupsRequest buildPartial() {
      com.google.cloud.alloydb.v1.ListBackupsRequest result =
          new com.google.cloud.alloydb.v1.ListBackupsRequest(this);
      // Copy fields only if at least one presence bit is set on this builder.
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies each explicitly-set field from the builder into the new message.
    // Bit layout: 0x1 = parent, 0x2 = pageSize, 0x4 = pageToken,
    // 0x8 = filter, 0x10 = orderBy.
    private void buildPartial0(com.google.cloud.alloydb.v1.ListBackupsRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.parent_ = parent_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.pageSize_ = pageSize_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.pageToken_ = pageToken_;
      }
      if (((from_bitField0_ & 0x00000008) != 0)) {
        result.filter_ = filter_;
      }
      if (((from_bitField0_ & 0x00000010) != 0)) {
        result.orderBy_ = orderBy_;
      }
    }
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      // Dispatch to the typed overload when possible; otherwise fall back to
      // the generic reflective merge in the superclass.
      if (other instanceof com.google.cloud.alloydb.v1.ListBackupsRequest) {
        return mergeFrom((com.google.cloud.alloydb.v1.ListBackupsRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Field-wise merge: each field is copied only when `other` holds a
    // non-default value, matching proto3 merge semantics for scalars/strings.
    public Builder mergeFrom(com.google.cloud.alloydb.v1.ListBackupsRequest other) {
      if (other == com.google.cloud.alloydb.v1.ListBackupsRequest.getDefaultInstance()) return this;
      if (!other.getParent().isEmpty()) {
        parent_ = other.parent_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (other.getPageSize() != 0) {
        setPageSize(other.getPageSize());
      }
      if (!other.getPageToken().isEmpty()) {
        pageToken_ = other.pageToken_;
        bitField0_ |= 0x00000004;
        onChanged();
      }
      if (!other.getFilter().isEmpty()) {
        filter_ = other.filter_;
        bitField0_ |= 0x00000008;
        onChanged();
      }
      if (!other.getOrderBy().isEmpty()) {
        orderBy_ = other.orderBy_;
        bitField0_ |= 0x00000010;
        onChanged();
      }
      // Unknown fields are merged so round-tripped data is preserved.
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      // No required fields in this proto3 message, so any builder state is buildable.
      return true;
    }
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          // Each tag encodes (field_number << 3) | wire_type: 10 = field 1,
          // length-delimited; 16 = field 2, varint; 26/34/42 = fields 3-5,
          // length-delimited. Tag 0 marks end of input.
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                parent_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 16:
              {
                pageSize_ = input.readInt32();
                bitField0_ |= 0x00000002;
                break;
              } // case 16
            case 26:
              {
                pageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            case 34:
              {
                filter_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000008;
                break;
              } // case 34
            case 42:
              {
                orderBy_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000010;
                break;
              } // case 42
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
private int bitField0_;
private java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. Parent value for ListBackupsRequest
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. Parent value for ListBackupsRequest
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. Parent value for ListBackupsRequest
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The parent to set.
* @return This builder for chaining.
*/
public Builder setParent(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Parent value for ListBackupsRequest
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearParent() {
parent_ = getDefaultInstance().getParent();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Parent value for ListBackupsRequest
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for parent to set.
* @return This builder for chaining.
*/
public Builder setParentBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private int pageSize_;
/**
*
*
* <pre>
* Requested page size. Server may return fewer items than requested.
* If unspecified, server will pick an appropriate default.
* </pre>
*
* <code>int32 page_size = 2;</code>
*
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
/**
*
*
* <pre>
* Requested page size. Server may return fewer items than requested.
* If unspecified, server will pick an appropriate default.
* </pre>
*
* <code>int32 page_size = 2;</code>
*
* @param value The pageSize to set.
* @return This builder for chaining.
*/
public Builder setPageSize(int value) {
pageSize_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Requested page size. Server may return fewer items than requested.
* If unspecified, server will pick an appropriate default.
* </pre>
*
* <code>int32 page_size = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearPageSize() {
bitField0_ = (bitField0_ & ~0x00000002);
pageSize_ = 0;
onChanged();
return this;
}
private java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* A token identifying a page of results the server should return.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return The pageToken.
*/
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* A token identifying a page of results the server should return.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return The bytes for pageToken.
*/
public com.google.protobuf.ByteString getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* A token identifying a page of results the server should return.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @param value The pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
pageToken_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* A token identifying a page of results the server should return.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return This builder for chaining.
*/
public Builder clearPageToken() {
pageToken_ = getDefaultInstance().getPageToken();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
*
*
* <pre>
* A token identifying a page of results the server should return.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @param value The bytes for pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
pageToken_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
    // Backing store for 'filter': holds a decoded String, or the raw ByteString as
    // received off the wire (generated-code lazy-decode caching pattern).
    private java.lang.Object filter_ = "";
    /**
     *
     *
     * <pre>
     * Filtering results
     * </pre>
     *
     * <code>string filter = 4;</code>
     *
     * @return The filter.
     */
    public java.lang.String getFilter() {
      java.lang.Object ref = filter_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        // Cache the decoded String so later calls skip UTF-8 decoding.
        filter_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Filtering results
     * </pre>
     *
     * <code>string filter = 4;</code>
     *
     * @return The bytes for filter.
     */
    public com.google.protobuf.ByteString getFilterBytes() {
      java.lang.Object ref = filter_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        // Cache the encoded ByteString for subsequent byte-oriented reads.
        filter_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Filtering results
     * </pre>
     *
     * <code>string filter = 4;</code>
     *
     * @param value The filter to set.
     * @return This builder for chaining.
     */
    public Builder setFilter(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      filter_ = value;
      // Mark the filter field (bit 0x8) as explicitly set.
      bitField0_ |= 0x00000008;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Filtering results
     * </pre>
     *
     * <code>string filter = 4;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearFilter() {
      // Reset to the proto3 default ("") and clear the has-bit.
      filter_ = getDefaultInstance().getFilter();
      bitField0_ = (bitField0_ & ~0x00000008);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Filtering results
     * </pre>
     *
     * <code>string filter = 4;</code>
     *
     * @param value The bytes for filter to set.
     * @return This builder for chaining.
     */
    public Builder setFilterBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      // Validate UTF-8 eagerly; proto3 string fields must hold valid UTF-8.
      checkByteStringIsUtf8(value);
      filter_ = value;
      bitField0_ |= 0x00000008;
      onChanged();
      return this;
    }
    // Backing store for 'order_by': a decoded String or the raw wire ByteString
    // (generated-code lazy-decode caching pattern).
    private java.lang.Object orderBy_ = "";
    /**
     *
     *
     * <pre>
     * Hint for how to order the results
     * </pre>
     *
     * <code>string order_by = 5;</code>
     *
     * @return The orderBy.
     */
    public java.lang.String getOrderBy() {
      java.lang.Object ref = orderBy_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        // Cache the decoded String so later calls skip UTF-8 decoding.
        orderBy_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Hint for how to order the results
     * </pre>
     *
     * <code>string order_by = 5;</code>
     *
     * @return The bytes for orderBy.
     */
    public com.google.protobuf.ByteString getOrderByBytes() {
      java.lang.Object ref = orderBy_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        // Cache the encoded ByteString for subsequent byte-oriented reads.
        orderBy_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Hint for how to order the results
     * </pre>
     *
     * <code>string order_by = 5;</code>
     *
     * @param value The orderBy to set.
     * @return This builder for chaining.
     */
    public Builder setOrderBy(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      orderBy_ = value;
      // Mark the order_by field (bit 0x10) as explicitly set.
      bitField0_ |= 0x00000010;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Hint for how to order the results
     * </pre>
     *
     * <code>string order_by = 5;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearOrderBy() {
      // Reset to the proto3 default ("") and clear the has-bit.
      orderBy_ = getDefaultInstance().getOrderBy();
      bitField0_ = (bitField0_ & ~0x00000010);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Hint for how to order the results
     * </pre>
     *
     * <code>string order_by = 5;</code>
     *
     * @param value The bytes for orderBy to set.
     * @return This builder for chaining.
     */
    public Builder setOrderByBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      // Validate UTF-8 eagerly; proto3 string fields must hold valid UTF-8.
      checkByteStringIsUtf8(value);
      orderBy_ = value;
      bitField0_ |= 0x00000010;
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      // Generated override: delegate unknown-field handling to the protobuf superclass.
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      // Generated override: delegate unknown-field merging to the protobuf superclass.
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.alloydb.v1.ListBackupsRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.alloydb.v1.ListBackupsRequest)
  // Singleton default instance, created eagerly at class-load time (generated code).
  private static final com.google.cloud.alloydb.v1.ListBackupsRequest DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.alloydb.v1.ListBackupsRequest();
  }
  public static com.google.cloud.alloydb.v1.ListBackupsRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Parser that delegates to Builder.mergeFrom and, on any failure, attaches the
  // partially-built message so callers can inspect what was parsed.
  private static final com.google.protobuf.Parser<ListBackupsRequest> PARSER =
      new com.google.protobuf.AbstractParser<ListBackupsRequest>() {
        @java.lang.Override
        public ListBackupsRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O failures in the protobuf-specific exception type.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<ListBackupsRequest> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<ListBackupsRequest> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.alloydb.v1.ListBackupsRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.tez.mapreduce.hadoop;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceAudience.Public;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.serializer.Deserializer;
import org.apache.hadoop.io.serializer.SerializationFactory;
import org.apache.hadoop.io.serializer.Serializer;
import org.apache.hadoop.mapred.InputSplit;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.JobSubmissionFiles;
import org.apache.hadoop.mapreduce.split.JobSplitWriter;
import org.apache.hadoop.mapreduce.split.TezGroupedSplit;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hadoop.yarn.api.records.LocalResource;
import org.apache.hadoop.yarn.api.records.LocalResourceType;
import org.apache.hadoop.yarn.api.records.LocalResourceVisibility;
import org.apache.hadoop.yarn.util.ConverterUtils;
import org.apache.tez.common.Preconditions;
import org.apache.tez.common.TezUtils;
import org.apache.tez.common.io.NonSyncDataOutputStream;
import org.apache.tez.dag.api.DataSourceDescriptor;
import org.apache.tez.dag.api.InputDescriptor;
import org.apache.tez.dag.api.TaskLocationHint;
import org.apache.tez.dag.api.TezUncheckedException;
import org.apache.tez.dag.api.UserPayload;
import org.apache.tez.dag.api.VertexLocationHint;
import org.apache.tez.mapreduce.input.MRInput;
import org.apache.tez.mapreduce.input.MRInputLegacy;
import org.apache.tez.mapreduce.protos.MRRuntimeProtos;
import org.apache.tez.mapreduce.protos.MRRuntimeProtos.MRSplitProto;
import org.apache.tez.runtime.api.InputContext;
import org.apache.tez.runtime.api.events.InputDataInformationEvent;
import com.google.common.base.Function;
import com.google.common.base.Strings;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.protobuf.ByteString;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@Public
@Unstable
public class MRInputHelpers {
  private static final Logger LOG = LoggerFactory.getLogger(MRInputHelpers.class);
  // Initial buffer size estimate (bytes) when serializing one split; buffer grows as needed.
  private static final int SPLIT_SERIALIZED_LENGTH_ESTIMATE = 40;
  // LocalResource names used to ship pre-generated split files to tasks.
  static final String JOB_SPLIT_RESOURCE_NAME = "job.split";
  static final String JOB_SPLIT_METAINFO_RESOURCE_NAME = "job.splitmetainfo";
  // Protected no-arg constructor: utility class, but subclassing is permitted.
  protected MRInputHelpers() {}
/**
* Setup split generation on the client, with splits being distributed via the traditional
* MapReduce mechanism of distributing splits via the Distributed Cache.
* <p/>
* Usage of this technique for handling splits is not advised. Instead, splits should be either
* generated in the AM, or generated in the client and distributed via the AM. See {@link
* org.apache.tez.mapreduce.input.MRInput.MRInputConfigBuilder}
* <p/>
* Note: Attempting to use this method to add multiple Inputs to a Vertex is not supported.
*
* This mechanism of propagating splits may be removed in a subsequent release, and is not recommended.
*
* @param conf configuration to be used by {@link org.apache.tez.mapreduce.input.MRInput}.
* This is expected to be fully configured.
* @param splitsDir the path to which splits will be generated.
* @param useLegacyInput whether to use {@link org.apache.tez.mapreduce.input.MRInputLegacy} or
* {@link org.apache.tez.mapreduce.input.MRInput}
* @return an instance of {@link org.apache.tez.dag.api.DataSourceDescriptor} which can be added
* as a data source to a {@link org.apache.tez.dag.api.Vertex}
*/
@InterfaceStability.Unstable
@InterfaceAudience.LimitedPrivate({"hive, pig"})
public static DataSourceDescriptor configureMRInputWithLegacySplitGeneration(Configuration conf,
Path splitsDir,
boolean useLegacyInput) {
InputSplitInfo inputSplitInfo;
try {
inputSplitInfo = generateInputSplits(conf, splitsDir);
InputDescriptor inputDescriptor = InputDescriptor.create(useLegacyInput ? MRInputLegacy.class
.getName() : MRInput.class.getName())
.setUserPayload(createMRInputPayload(conf, null, false, true));
Map<String, LocalResource> additionalLocalResources = new HashMap<String, LocalResource>();
updateLocalResourcesForInputSplits(conf, inputSplitInfo,
additionalLocalResources);
return DataSourceDescriptor.create(inputDescriptor, null, inputSplitInfo.getNumTasks(),
inputSplitInfo.getCredentials(),
VertexLocationHint.create(inputSplitInfo.getTaskLocationHints()),
additionalLocalResources);
} catch (IOException | InterruptedException | ClassNotFoundException e) {
throw new TezUncheckedException("Failed to generate InputSplits", e);
}
}
/**
* Parse the payload used by MRInputPayload
*
* @param payload the {@link org.apache.tez.dag.api.UserPayload} instance
* @return an instance of {@link org.apache.tez.mapreduce.protos.MRRuntimeProtos.MRInputUserPayloadProto},
* which provides access to the underlying configuration bytes
*/
@InterfaceStability.Evolving
@InterfaceAudience.LimitedPrivate({"hive, pig"})
public static MRRuntimeProtos.MRInputUserPayloadProto parseMRInputPayload(UserPayload payload)
throws IOException {
return MRRuntimeProtos.MRInputUserPayloadProto.parseFrom(ByteString.copyFrom(payload.getPayload()));
}
/**
* Create an instance of {@link org.apache.hadoop.mapred.InputSplit} from the {@link
* org.apache.tez.mapreduce.input.MRInput} representation of a split.
*
* @param splitProto The {@link org.apache.tez.mapreduce.protos.MRRuntimeProtos.MRSplitProto}
* instance representing the split
* @param serializationFactory the serialization mechanism used to write out the split
* @return an instance of the split
*/
@SuppressWarnings("unchecked")
@InterfaceStability.Evolving
@InterfaceAudience.LimitedPrivate({"hive, pig"})
public static InputSplit createOldFormatSplitFromUserPayload(
MRRuntimeProtos.MRSplitProto splitProto, SerializationFactory serializationFactory)
throws IOException {
// This may not need to use serialization factory, since OldFormat
// always uses Writable to write splits.
Objects.requireNonNull(splitProto, "splitProto cannot be null");
String className = splitProto.getSplitClassName();
Class<InputSplit> clazz;
try {
clazz = (Class<InputSplit>) Class.forName(className);
} catch (ClassNotFoundException e) {
throw new IOException("Failed to load InputSplit class: [" + className + "]", e);
}
Deserializer<InputSplit> deserializer = serializationFactory
.getDeserializer(clazz);
deserializer.open(splitProto.getSplitBytes().newInput());
InputSplit inputSplit = deserializer.deserialize(null);
deserializer.close();
return inputSplit;
}
/**
* Create an instance of {@link org.apache.hadoop.mapreduce.InputSplit} from the {@link
* org.apache.tez.mapreduce.input.MRInput} representation of a split.
*
* @param splitProto The {@link org.apache.tez.mapreduce.protos.MRRuntimeProtos.MRSplitProto}
* instance representing the split
* @param serializationFactory the serialization mechanism used to write out the split
* @return an instance of the split
*/
@InterfaceStability.Evolving
@SuppressWarnings("unchecked")
public static org.apache.hadoop.mapreduce.InputSplit createNewFormatSplitFromUserPayload(
MRRuntimeProtos.MRSplitProto splitProto, SerializationFactory serializationFactory)
throws IOException {
Objects.requireNonNull(splitProto, "splitProto must be specified");
String className = splitProto.getSplitClassName();
Class<org.apache.hadoop.mapreduce.InputSplit> clazz;
try {
clazz = (Class<org.apache.hadoop.mapreduce.InputSplit>) Class
.forName(className);
} catch (ClassNotFoundException e) {
throw new IOException("Failed to load InputSplit class: [" + className + "]", e);
}
Deserializer<org.apache.hadoop.mapreduce.InputSplit> deserializer = serializationFactory
.getDeserializer(clazz);
deserializer.open(splitProto.getSplitBytes().newInput());
org.apache.hadoop.mapreduce.InputSplit inputSplit = deserializer
.deserialize(null);
deserializer.close();
return inputSplit;
}
@InterfaceStability.Evolving
public static <T extends org.apache.hadoop.mapreduce.InputSplit> MRRuntimeProtos.MRSplitProto createSplitProto(
T newSplit, SerializationFactory serializationFactory)
throws IOException {
MRRuntimeProtos.MRSplitProto.Builder builder = MRRuntimeProtos.MRSplitProto
.newBuilder();
builder.setSplitClassName(newSplit.getClass().getName());
@SuppressWarnings("unchecked")
Serializer<T> serializer = serializationFactory
.getSerializer((Class<T>) newSplit.getClass());
ByteString.Output out = ByteString
.newOutput(SPLIT_SERIALIZED_LENGTH_ESTIMATE);
serializer.open(out);
serializer.serialize(newSplit);
// TODO MR Compat: Check against max block locations per split.
ByteString splitBs = out.toByteString();
builder.setSplitBytes(splitBs);
return builder.build();
}
@InterfaceStability.Evolving
@InterfaceAudience.LimitedPrivate({"hive, pig"})
public static MRRuntimeProtos.MRSplitProto createSplitProto(
org.apache.hadoop.mapred.InputSplit oldSplit) throws IOException {
MRRuntimeProtos.MRSplitProto.Builder builder = MRRuntimeProtos.MRSplitProto.newBuilder();
builder.setSplitClassName(oldSplit.getClass().getName());
ByteString.Output os = ByteString
.newOutput(SPLIT_SERIALIZED_LENGTH_ESTIMATE);
oldSplit.write(new NonSyncDataOutputStream(os));
ByteString splitBs = os.toByteString();
builder.setSplitBytes(splitBs);
return builder.build();
}
  /**
   * Generates input splits and stores them in a
   * {@link org.apache.tez.mapreduce.protos.MRRuntimeProtos.MRSplitsProto} instance.
   *
   * Returns an instance of {@link InputSplitInfoMem}.
   *
   * With grouping enabled, the eventual configuration used by the tasks will have
   * the user-specified InputFormat replaced by either
   * {@link org.apache.hadoop.mapred.split.TezGroupedSplitsInputFormat}
   * or {@link org.apache.hadoop.mapreduce.split.TezGroupedSplitsInputFormat}.
   *
   * @param conf
   *          a Configuration used to determine whether the mapred or mapreduce API
   *          is in use. It must also contain enough information to generate splits
   *          (InputFormat class and its related configuration).
   * @param groupSplits whether to group the splits or not
   * @param targetTasks the number of target tasks if grouping is enabled. Specify as 0 otherwise.
   * @return an instance of {@link InputSplitInfoMem} which supports a subset of
   *         the APIs defined on {@link InputSplitInfo}
   */
  @InterfaceStability.Unstable
  @InterfaceAudience.LimitedPrivate({"hive, pig"})
  public static InputSplitInfoMem generateInputSplitsToMem(Configuration conf,
      boolean groupSplits, int targetTasks)
      throws IOException, ClassNotFoundException, InterruptedException {
    // Delegates with sortSplits=true, preserving the historical default behaviour.
    return generateInputSplitsToMem(conf, groupSplits, true, targetTasks);
  }
/**
* Generates Input splits and stores them in a {@link org.apache.tez.mapreduce.protos.MRRuntimeProtos.MRSplitsProto} instance.
*
* Returns an instance of {@link InputSplitInfoMem}
*
* With grouping enabled, the eventual configuration used by the tasks, will have
* the user-specified InputFormat replaced by either {@link org.apache.hadoop.mapred.split.TezGroupedSplitsInputFormat}
* or {@link org.apache.hadoop.mapreduce.split.TezGroupedSplitsInputFormat}
*
* @param conf
* an instance of Configuration which is used to determine whether
* the mapred of mapreduce API is being used. This Configuration
* instance should also contain adequate information to be able to
* generate splits - like the InputFormat being used and related
* configuration.
* @param groupSplits whether to group the splits or not
* @param sortSplits whether to sort the splits or not
* @param targetTasks the number of target tasks if grouping is enabled. Specify as 0 otherwise.
* @return an instance of {@link InputSplitInfoMem} which supports a subset of
* the APIs defined on {@link InputSplitInfo}
*/
@InterfaceStability.Unstable
public static InputSplitInfoMem generateInputSplitsToMem(Configuration conf,
boolean groupSplits, boolean sortSplits, int targetTasks)
throws IOException, ClassNotFoundException, InterruptedException {
InputSplitInfoMem splitInfoMem;
JobConf jobConf = new JobConf(conf);
if (jobConf.getUseNewMapper()) {
LOG.debug("Generating mapreduce api input splits");
Job job = Job.getInstance(conf);
org.apache.hadoop.mapreduce.InputSplit[] splits =
generateNewSplits(job, groupSplits, sortSplits, targetTasks);
splitInfoMem = new InputSplitInfoMem(splits, createTaskLocationHintsFromSplits(splits),
splits.length, job.getCredentials(), job.getConfiguration());
} else {
LOG.debug("Generating mapred api input splits");
org.apache.hadoop.mapred.InputSplit[] splits =
generateOldSplits(jobConf, groupSplits, sortSplits, targetTasks);
splitInfoMem = new InputSplitInfoMem(splits, createTaskLocationHintsFromSplits(splits),
splits.length, jobConf.getCredentials(), jobConf);
}
LOG.info("NumSplits: " + splitInfoMem.getNumTasks() + ", SerializedSize: "
+ splitInfoMem.getSplitsProto().getSerializedSize());
return splitInfoMem;
}
private static List<TaskLocationHint> createTaskLocationHintsFromSplits(
org.apache.hadoop.mapreduce.InputSplit[] newFormatSplits) {
Iterable<TaskLocationHint> iterable = Iterables
.transform(Arrays.asList(newFormatSplits),
new Function<org.apache.hadoop.mapreduce.InputSplit, TaskLocationHint>() {
@Override
public TaskLocationHint apply(
org.apache.hadoop.mapreduce.InputSplit input) {
try {
if (input instanceof TezGroupedSplit) {
String rack =
((org.apache.hadoop.mapreduce.split.TezGroupedSplit) input).getRack();
if (rack == null) {
if (input.getLocations() != null) {
return TaskLocationHint.createTaskLocationHint(
new HashSet<>(Arrays.asList(input.getLocations())), null);
} else {
return TaskLocationHint.createTaskLocationHint(null, null);
}
} else {
return TaskLocationHint.createTaskLocationHint(null,
Collections.singleton(rack));
}
} else {
return TaskLocationHint.createTaskLocationHint(
new HashSet<>(Arrays.asList(input.getLocations())), null);
}
} catch (IOException e) {
throw new RuntimeException(e);
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
}
});
return Lists.newArrayList(iterable);
}
private static List<TaskLocationHint> createTaskLocationHintsFromSplits(
org.apache.hadoop.mapred.InputSplit[] oldFormatSplits) {
Iterable<TaskLocationHint> iterable = Iterables.transform(Arrays.asList(oldFormatSplits),
new Function<org.apache.hadoop.mapred.InputSplit, TaskLocationHint>() {
@Override
public TaskLocationHint apply(org.apache.hadoop.mapred.InputSplit input) {
try {
if (input instanceof org.apache.hadoop.mapred.split.TezGroupedSplit) {
String rack = ((org.apache.hadoop.mapred.split.TezGroupedSplit) input).getRack();
if (rack == null) {
if (input.getLocations() != null) {
return TaskLocationHint.createTaskLocationHint(new HashSet<String>(Arrays.asList(
input.getLocations())), null);
} else {
return TaskLocationHint.createTaskLocationHint(null, null);
}
} else {
return TaskLocationHint.createTaskLocationHint(null, Collections.singleton(rack));
}
} else {
return TaskLocationHint.createTaskLocationHint(
new HashSet<>(Arrays.asList(input.getLocations())),
null);
}
} catch (IOException e) {
throw new RuntimeException(e);
}
}
});
return Lists.newArrayList(iterable);
}
  // Generates new-api (mapreduce) splits, optionally wrapping the real InputFormat in
  // TezGroupedSplitsInputFormat so that splits are grouped down to roughly numTasks.
  @SuppressWarnings({ "rawtypes", "unchecked" })
  private static org.apache.hadoop.mapreduce.InputSplit[] generateNewSplits(
      JobContext jobContext, boolean groupSplits, boolean sortSplits,
      int numTasks) throws IOException,
      InterruptedException {
    Configuration conf = jobContext.getConfiguration();
    // This is the real input format.
    org.apache.hadoop.mapreduce.InputFormat<?, ?> inputFormat;
    try {
      inputFormat = ReflectionUtils.newInstance(jobContext.getInputFormatClass(), conf);
    } catch (ClassNotFoundException e) {
      throw new TezUncheckedException(e);
    }
    org.apache.hadoop.mapreduce.InputFormat<?, ?> finalInputFormat;
    // For grouping, the underlying InputFormatClass class is passed in as a parameter.
    // JobContext has this setup as TezGroupedSplitInputFormat
    if (groupSplits) {
      org.apache.hadoop.mapreduce.split.TezGroupedSplitsInputFormat groupedFormat =
          new org.apache.hadoop.mapreduce.split.TezGroupedSplitsInputFormat();
      groupedFormat.setConf(conf);
      groupedFormat.setInputFormat(inputFormat);
      groupedFormat.setDesiredNumberOfSplits(numTasks);
      finalInputFormat = groupedFormat;
    } else {
      finalInputFormat = inputFormat;
    }
    List<org.apache.hadoop.mapreduce.InputSplit> array = finalInputFormat
        .getSplits(jobContext);
    org.apache.hadoop.mapreduce.InputSplit[] splits = array
        .toArray(new org.apache.hadoop.mapreduce.InputSplit[array.size()]);
    if (sortSplits) {
      // sort the splits into order based on size, so that the biggest
      // go first
      Arrays.sort(splits, new InputSplitComparator());
    } else {
      // Shuffle in place: Arrays.asList is a live view over the splits array.
      Collections.shuffle(Arrays.asList(splits));
    }
    return splits;
  }
@SuppressWarnings({ "rawtypes", "unchecked" })
private static org.apache.hadoop.mapred.InputSplit[] generateOldSplits(
JobConf jobConf, boolean groupSplits, boolean sortSplits, int numTasks)
throws IOException {
// This is the real InputFormat
org.apache.hadoop.mapred.InputFormat inputFormat;
try {
inputFormat = jobConf.getInputFormat();
} catch (Exception e) {
throw new TezUncheckedException(e);
}
org.apache.hadoop.mapred.InputFormat finalInputFormat;
if (groupSplits) {
org.apache.hadoop.mapred.split.TezGroupedSplitsInputFormat groupedFormat =
new org.apache.hadoop.mapred.split.TezGroupedSplitsInputFormat();
groupedFormat.setConf(jobConf);
groupedFormat.setInputFormat(inputFormat);
groupedFormat.setDesiredNumberOfSplits(numTasks);
finalInputFormat = groupedFormat;
} else {
finalInputFormat = inputFormat;
}
org.apache.hadoop.mapred.InputSplit[] splits = finalInputFormat
.getSplits(jobConf, jobConf.getNumMapTasks());
if (sortSplits) {
// sort the splits into order based on size, so that the biggest
// go first
Arrays.sort(splits, new OldInputSplitComparator());
}
return splits;
}
/**
* Comparator for org.apache.hadoop.mapreduce.InputSplit
*/
private static class InputSplitComparator
implements Comparator<org.apache.hadoop.mapreduce.InputSplit> {
@Override
public int compare(org.apache.hadoop.mapreduce.InputSplit o1,
org.apache.hadoop.mapreduce.InputSplit o2) {
try {
long len1 = o1.getLength();
long len2 = o2.getLength();
return Long.compare(len2, len1);
} catch (IOException | InterruptedException ie) {
throw new RuntimeException("exception in InputSplit compare", ie);
}
}
}
/**
* Comparator for org.apache.hadoop.mapred.InputSplit
*/
private static class OldInputSplitComparator
implements Comparator<org.apache.hadoop.mapred.InputSplit> {
@Override
public int compare(org.apache.hadoop.mapred.InputSplit o1,
org.apache.hadoop.mapred.InputSplit o2) {
try {
long len1 = o1.getLength();
long len2 = o2.getLength();
return Long.compare(len2, len1);
} catch (IOException ie) {
throw new RuntimeException("Problem getting input split size", ie);
}
}
}
  /**
   * Generate new-api mapreduce InputFormat splits and write them to disk.
   * @param jobContext JobContext required by InputFormat
   * @param inputSplitDir Directory in which to generate splits information
   *
   * @return InputSplitInfo containing the split files' information and the
   * location hints for each split generated to be used to determining parallelism of
   * the map stage.
   */
  private static InputSplitInfoDisk writeNewSplits(JobContext jobContext,
      Path inputSplitDir) throws IOException, InterruptedException,
      ClassNotFoundException {
    // Generate ungrouped, size-sorted splits (groupSplits=false, sortSplits=true).
    org.apache.hadoop.mapreduce.InputSplit[] splits =
        generateNewSplits(jobContext, false, true, 0);
    Configuration conf = jobContext.getConfiguration();
    // Write job.split / job.splitmetainfo files into inputSplitDir.
    JobSplitWriter.createSplitFiles(inputSplitDir, conf,
        inputSplitDir.getFileSystem(conf), splits);
    List<TaskLocationHint> locationHints =
        new ArrayList<TaskLocationHint>(splits.length);
    for (org.apache.hadoop.mapreduce.InputSplit split : splits) {
      locationHints.add(
          TaskLocationHint.createTaskLocationHint(new HashSet<String>(
              Arrays.asList(split.getLocations())), null)
      );
    }
    return new InputSplitInfoDisk(
        JobSubmissionFiles.getJobSplitFile(inputSplitDir),
        JobSubmissionFiles.getJobSplitMetaFile(inputSplitDir),
        splits.length, locationHints, jobContext.getCredentials());
  }
/**
* Generate old-api mapred InputFormat splits
* @param jobConf JobConf required by InputFormat class
* @param inputSplitDir Directory in which to generate splits information
*
* @return InputSplitInfo containing the split files' information and the
* number of splits generated to be used to determining parallelism of
* the map stage.
*/
private static InputSplitInfoDisk writeOldSplits(JobConf jobConf,
Path inputSplitDir) throws IOException {
org.apache.hadoop.mapred.InputSplit[] splits =
generateOldSplits(jobConf, false, true, 0);
JobSplitWriter.createSplitFiles(inputSplitDir, jobConf,
inputSplitDir.getFileSystem(jobConf), splits);
List<TaskLocationHint> locationHints =
new ArrayList<>(splits.length);
for (InputSplit split : splits) {
locationHints.add(
TaskLocationHint.createTaskLocationHint(new HashSet<>(
Arrays.asList(split.getLocations())), null)
);
}
return new InputSplitInfoDisk(
JobSubmissionFiles.getJobSplitFile(inputSplitDir),
JobSubmissionFiles.getJobSplitMetaFile(inputSplitDir),
splits.length, locationHints, jobConf.getCredentials());
}
  /**
   * Helper api to generate splits
   * @param conf Configuration with all necessary information set to generate
   * splits. The following are required at a minimum:
   *
   *   - mapred.mapper.new-api: determine whether mapred.InputFormat or
   *     mapreduce.InputFormat is to be used
   *   - mapred.input.format.class or mapreduce.job.inputformat.class:
   *     determines the InputFormat class to be used
   *
   * In addition to this, all the configs needed by the InputFormat class also
   * have to be set. For example, FileInputFormat needs the input directory
   * paths to be set in the config.
   *
   * @param inputSplitsDir Directory in which the splits file and meta info file
   * will be generated. job.split and job.splitmetainfo files in this directory
   * will be overwritten. Should be a fully-qualified path.
   *
   * @return InputSplitInfo containing the split files' information and the
   * number of splits generated to be used to determining parallelism of
   * the map stage.
   */
  private static InputSplitInfoDisk generateInputSplits(Configuration conf,
      Path inputSplitsDir) throws IOException, InterruptedException,
      ClassNotFoundException {
    Job job = Job.getInstance(conf);
    JobConf jobConf = new JobConf(conf);
    // NOTE(review): job and jobConf above were constructed from (copies of) conf before
    // this flag is set, so the false value may not propagate into them — confirm intent.
    conf.setBoolean(MRJobConfig.MR_TEZ_SPLITS_VIA_EVENTS, false);
    if (jobConf.getUseNewMapper()) {
      LOG.info("Generating new input splits"
          + ", splitsDir=" + inputSplitsDir.toString());
      return writeNewSplits(job, inputSplitsDir);
    } else {
      LOG.info("Generating old input splits"
          + ", splitsDir=" + inputSplitsDir.toString());
      return writeOldSplits(jobConf, inputSplitsDir);
    }
  }
  /**
   * Update provided localResources collection with the required local
   * resources needed by MapReduce tasks with respect to Input splits.
   *
   * @param conf Configuration
   * @param inputSplitInfo Information on location of split files
   * @param localResources LocalResources collection to be updated
   * @throws IOException if the split files' status cannot be read
   */
  private static void updateLocalResourcesForInputSplits(
      Configuration conf,
      InputSplitInfo inputSplitInfo,
      Map<String, LocalResource> localResources) throws IOException {
    // Fail fast rather than silently overwrite an existing resource entry.
    if (localResources.containsKey(JOB_SPLIT_RESOURCE_NAME)) {
      throw new RuntimeException("LocalResources already contains a"
          + " resource named " + JOB_SPLIT_RESOURCE_NAME);
    }
    if (localResources.containsKey(JOB_SPLIT_METAINFO_RESOURCE_NAME)) {
      throw new RuntimeException("LocalResources already contains a"
          + " resource named " + JOB_SPLIT_METAINFO_RESOURCE_NAME);
    }
    FileSystem splitsFS = inputSplitInfo.getSplitsFile().getFileSystem(conf);
    FileStatus splitFileStatus =
        splitsFS.getFileStatus(inputSplitInfo.getSplitsFile());
    FileStatus metaInfoFileStatus =
        splitsFS.getFileStatus(inputSplitInfo.getSplitsMetaInfoFile());
    // Register both files as application-visible FILE resources; size and mtime
    // are recorded so YARN can validate the localized copies.
    localResources.put(JOB_SPLIT_RESOURCE_NAME,
        LocalResource.newInstance(
            ConverterUtils.getYarnUrlFromPath(inputSplitInfo.getSplitsFile()),
            LocalResourceType.FILE,
            LocalResourceVisibility.APPLICATION,
            splitFileStatus.getLen(), splitFileStatus.getModificationTime()));
    localResources.put(JOB_SPLIT_METAINFO_RESOURCE_NAME,
        LocalResource.newInstance(
            ConverterUtils.getYarnUrlFromPath(
                inputSplitInfo.getSplitsMetaInfoFile()),
            LocalResourceType.FILE,
            LocalResourceVisibility.APPLICATION,
            metaInfoFileStatus.getLen(),
            metaInfoFileStatus.getModificationTime()));
  }
/**
* Called to specify that grouping of input splits be performed by Tez
* The conf should have the input format class configuration
* set to the TezGroupedSplitsInputFormat. The real input format class name
* should be passed as an argument to this method.
* <p/>
* With grouping enabled, the eventual configuration used by the tasks, will have
* the user-specified InputFormat replaced by either {@link org.apache.hadoop.mapred.split.TezGroupedSplitsInputFormat}
* or {@link org.apache.hadoop.mapreduce.split.TezGroupedSplitsInputFormat}
*/
@InterfaceAudience.Private
protected static UserPayload createMRInputPayloadWithGrouping(Configuration conf) throws IOException {
Preconditions
.checkArgument(conf != null, "Configuration must be specified");
return createMRInputPayload(TezUtils.createByteStringFromConf(conf),
null, true, true);
}
  /**
   * Builds an MRInput payload with grouping disabled and sorting enabled.
   *
   * @param conf configuration to embed in the payload
   * @param mrSplitsProto pre-generated splits, or null if splits are generated later
   */
  @InterfaceAudience.Private
  protected static UserPayload createMRInputPayload(Configuration conf,
                                                    MRRuntimeProtos.MRSplitsProto mrSplitsProto) throws
      IOException {
    return createMRInputPayload(conf, mrSplitsProto, false, true);
  }
/**
* When isGrouped is true, it specifies that grouping of input splits be
* performed by Tez The conf should have the input format class configuration
* set to the TezGroupedSplitsInputFormat. The real input format class name
* should be passed as an argument to this method.
* <p/>
* With grouping enabled, the eventual configuration used by the tasks, will have
* the user-specified InputFormat replaced by either {@link org.apache.hadoop.mapred.split.TezGroupedSplitsInputFormat}
* or {@link org.apache.hadoop.mapreduce.split.TezGroupedSplitsInputFormat}
*/
@InterfaceAudience.Private
protected static UserPayload createMRInputPayload(Configuration conf,
MRRuntimeProtos.MRSplitsProto mrSplitsProto, boolean isGrouped,
boolean isSorted) throws
IOException {
Preconditions
.checkArgument(conf != null, "Configuration must be specified");
return createMRInputPayload(TezUtils.createByteStringFromConf(conf),
mrSplitsProto, isGrouped, isSorted);
}
/**
 * Assembles the {@code MRInputUserPayloadProto} from its already-serialized
 * parts and wraps it in a read-only {@link UserPayload}.
 */
private static UserPayload createMRInputPayload(ByteString bytes,
    MRRuntimeProtos.MRSplitsProto mrSplitsProto,
    boolean isGrouped, boolean isSorted) {
  MRRuntimeProtos.MRInputUserPayloadProto.Builder builder =
      MRRuntimeProtos.MRInputUserPayloadProto.newBuilder()
          .setConfigurationBytes(bytes)
          .setGroupingEnabled(isGrouped)
          .setSortSplitsEnabled(isSorted);
  // Splits are optional; only embed them when the caller supplied some.
  if (mrSplitsProto != null) {
    builder.setSplits(mrSplitsProto);
  }
  return UserPayload.create(
      builder.build().toByteString().asReadOnlyByteBuffer());
}
/**
 * Looks up a string-valued property in the given configuration.
 *
 * @param conf configuration instance; must not be null
 * @param propertyName property key to read; must not be null
 * @return the property value, or null if the property is not set
 */
private static String getStringProperty(Configuration conf, String propertyName) {
  if (conf == null) {
    throw new NullPointerException("Configuration must be provided");
  }
  if (propertyName == null) {
    throw new NullPointerException("Property name must be provided");
  }
  return conf.get(propertyName);
}
/**
 * Looks up an int-valued property in the given configuration.
 *
 * @param conf configuration instance; must not be null
 * @param propertyName property key to read; must not be null
 * @return the parsed int value of the property
 * @throws NumberFormatException if the property is unset or not a valid int
 */
private static int getIntProperty(Configuration conf, String propertyName) {
  String value = getStringProperty(conf, propertyName);
  // Integer.parseInt(null) would throw an opaque NumberFormatException;
  // name the offending property so misconfiguration is diagnosable.
  if (value == null) {
    throw new NumberFormatException(
        "Property " + propertyName + " is not set in the Configuration");
  }
  return Integer.parseInt(value);
}
/**
 * Returns the DAG index published for the current task.
 *
 * @see InputContext#getDagIdentifier()
 * @param conf configuration instance
 * @return dag index
 */
@Public
public static int getDagIndex(Configuration conf) {
  final String key = MRInput.TEZ_MAPREDUCE_DAG_INDEX;
  return getIntProperty(conf, key);
}
/**
 * Returns the string form of the full DAG identifier.
 *
 * @param conf configuration instance
 * @return dag identifier
 */
@Public
public static String getDagIdString(Configuration conf) {
  final String key = MRInput.TEZ_MAPREDUCE_DAG_ID;
  return getStringProperty(conf, key);
}
/**
 * Returns the vertex index published for the current task.
 *
 * @see InputContext#getTaskVertexIndex
 * @param conf configuration instance
 * @return vertex index
 */
@Public
public static int getVertexIndex(Configuration conf) {
  final String key = MRInput.TEZ_MAPREDUCE_VERTEX_INDEX;
  return getIntProperty(conf, key);
}
/**
 * Returns the string form of the full vertex identifier.
 *
 * @param conf configuration instance
 * @return vertex identifier
 */
@Public
public static String getVertexIdString(Configuration conf) {
  final String key = MRInput.TEZ_MAPREDUCE_VERTEX_ID;
  return getStringProperty(conf, key);
}
/**
 * Returns the task index published for the current task.
 *
 * @see InputContext#getTaskIndex
 * @param conf configuration instance
 * @return task index
 */
@Public
public static int getTaskIndex(Configuration conf) {
  final String key = MRInput.TEZ_MAPREDUCE_TASK_INDEX;
  return getIntProperty(conf, key);
}
/**
 * Returns the string form of the full task identifier.
 *
 * @param conf configuration instance
 * @return task identifier
 */
@Public
public static String getTaskIdString(Configuration conf) {
  final String key = MRInput.TEZ_MAPREDUCE_TASK_ID;
  return getStringProperty(conf, key);
}
/**
 * Returns the task attempt number published for the current task.
 *
 * @see InputContext#getTaskAttemptNumber
 * @param conf configuration instance
 * @return task attempt index
 */
@Public
public static int getTaskAttemptIndex(Configuration conf) {
  final String key = MRInput.TEZ_MAPREDUCE_TASK_ATTEMPT_INDEX;
  return getIntProperty(conf, key);
}
/**
 * Returns the string form of the full task attempt identifier.
 *
 * @param conf configuration instance
 * @return task attempt identifier
 */
@Public
public static String getTaskAttemptIdString(Configuration conf) {
  final String key = MRInput.TEZ_MAPREDUCE_TASK_ATTEMPT_ID;
  return getStringProperty(conf, key);
}
/**
 * Returns the input index published for the current task.
 *
 * @see InputContext#getInputIndex
 * @param conf configuration instance
 * @return input index
 */
@Public
public static int getInputIndex(Configuration conf) {
  final String key = MRInput.TEZ_MAPREDUCE_INPUT_INDEX;
  return getIntProperty(conf, key);
}
/**
 * Returns the DAG name published for the current task.
 *
 * @see InputContext#getDAGName
 * @param conf configuration instance
 * @return dag name
 */
@Public
public static String getDagName(Configuration conf) {
  final String key = MRInput.TEZ_MAPREDUCE_DAG_NAME;
  return getStringProperty(conf, key);
}
/**
 * Returns the vertex name published for the current task.
 *
 * @see InputContext#getTaskVertexName
 * @param conf configuration instance
 * @return vertex name
 */
@Public
public static String getVertexName(Configuration conf) {
  final String key = MRInput.TEZ_MAPREDUCE_VERTEX_NAME;
  return getStringProperty(conf, key);
}
/**
 * Returns the (source) input name published for the current task.
 *
 * @see InputContext#getSourceVertexName
 * @param conf configuration instance
 * @return source name
 */
@Public
public static String getInputName(Configuration conf) {
  final String key = MRInput.TEZ_MAPREDUCE_INPUT_NAME;
  return getStringProperty(conf, key);
}
/**
 * Returns the application id, as a string, published for the current task.
 *
 * @see InputContext#getApplicationId
 * @param conf configuration instance
 * @return applicationId as a string
 */
@Public
public static String getApplicationIdString(Configuration conf) {
  final String key = MRInput.TEZ_MAPREDUCE_APPLICATION_ID;
  return getStringProperty(conf, key);
}
/**
 * Returns the unique identifier published for this input.
 *
 * @see InputContext#getUniqueIdentifier
 * @param conf configuration instance
 * @return unique identifier for the input
 */
@Public
public static String getUniqueIdentifier(Configuration conf) {
  final String key = MRInput.TEZ_MAPREDUCE_UNIQUE_IDENTIFIER;
  return getStringProperty(conf, key);
}
/**
 * Returns the DAG attempt number published for the current task.
 *
 * @see InputContext#getDAGAttemptNumber
 * @param conf configuration instance
 * @return attempt number
 */
@Public
public static int getDagAttemptNumber(Configuration conf) {
  final String key = MRInput.TEZ_MAPREDUCE_DAG_ATTEMPT_NUMBER;
  return getIntProperty(conf, key);
}
/**
 * Deserializes the split proto carried by the given event: from the in-memory
 * payload when no serialized path is set, otherwise from the filesystem.
 */
public static MRSplitProto getProto(InputDataInformationEvent initEvent, JobConf jobConf) throws IOException {
  if (Strings.isNullOrEmpty(initEvent.getSerializedPath())) {
    return readProtoFromPayload(initEvent);
  }
  return readProtoFromFs(initEvent, jobConf);
}
/**
 * Reads the serialized {@code MRSplitProto} from the path carried by the event,
 * deleting the file afterwards (the serialized split is single-use).
 *
 * @param initEvent event whose {@code serializedPath} points at the split file
 * @param jobConf used to resolve the {@link FileSystem} for the path
 * @return the parsed split proto
 * @throws IOException if the file cannot be opened or parsed
 */
private static MRSplitProto readProtoFromFs(InputDataInformationEvent initEvent, JobConf jobConf) throws IOException {
  Path filePath = new Path(initEvent.getSerializedPath());
  LOG.info("Reading InputDataInformationEvent from path: {}", filePath);
  FileSystem fs = filePath.getFileSystem(jobConf);
  MRSplitProto splitProto;
  try (FSDataInputStream in = fs.open(filePath)) {
    splitProto = MRSplitProto.parseFrom(in);
  }
  // Cleanup is best-effort, but a silently ignored delete (as before) can leak
  // split files; surface the failure instead of discarding the return value.
  if (!fs.delete(filePath, false)) {
    LOG.warn("Failed to delete serialized split file: {}", filePath);
  }
  return splitProto;
}
/**
 * Parses the {@code MRSplitProto} directly from the event's in-memory payload.
 */
private static MRSplitProto readProtoFromPayload(InputDataInformationEvent initEvent) throws IOException {
  ByteBuffer payload = initEvent.getUserPayload();
  LOG.info("Reading InputDataInformationEvent from payload: {}", payload);
  ByteString bytes = ByteString.copyFrom(payload);
  return MRSplitProto.parseFrom(bytes);
}
}
|
googleapis/google-api-java-client-services | 38,049 | clients/google-api-services-compute/beta/1.28.0/com/google/api/services/compute/model/InterconnectAttachment.java | /*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.compute.model;
/**
* Represents an Interconnect Attachment (VLAN) resource.
*
* You can use Interconnect attachments (VLANS) to connect your Virtual Private Cloud networks to
* your on-premises networks through an Interconnect. For more information, read Creating VLAN
* Attachments. (== resource_for beta.interconnectAttachments ==) (== resource_for
* v1.interconnectAttachments ==)
*
* <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
* transmitted over HTTP when working with the Compute Engine API. For a detailed explanation see:
* <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
* </p>
*
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public final class InterconnectAttachment extends com.google.api.client.json.GenericJson {
/**
* Determines whether this Attachment will carry packets. Not present for PARTNER_PROVIDER.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean adminEnabled;
/**
* Provisioned bandwidth capacity for the interconnect attachment. For attachments of type
* DEDICATED, the user can set the bandwidth. For attachments of type PARTNER, the Google Partner
* that is operating the interconnect must set the bandwidth. Output only for PARTNER type,
* mutable for PARTNER_PROVIDER and DEDICATED, and can take one of the following values: -
* BPS_50M: 50 Mbit/s - BPS_100M: 100 Mbit/s - BPS_200M: 200 Mbit/s - BPS_300M: 300 Mbit/s -
* BPS_400M: 400 Mbit/s - BPS_500M: 500 Mbit/s - BPS_1G: 1 Gbit/s - BPS_2G: 2 Gbit/s - BPS_5G:
* 5 Gbit/s - BPS_10G: 10 Gbit/s - BPS_20G: 20 Gbit/s - BPS_50G: 50 Gbit/s
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String bandwidth;
/**
* Up to 16 candidate prefixes that can be used to restrict the allocation of cloudRouterIpAddress
* and customerRouterIpAddress for this attachment. All prefixes must be within link-local address
* space (169.254.0.0/16) and must be /29 or shorter (/28, /27, etc). Google will attempt to
* select an unused /29 from the supplied candidate prefix(es). The request will fail if all
* possible /29s are in use on Google's edge. If not supplied, Google will randomly select an
* unused /29 from all of link-local space.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.List<java.lang.String> candidateSubnets;
/**
* [Output Only] IPv4 address + prefix length to be configured on Cloud Router Interface for this
* interconnect attachment.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String cloudRouterIpAddress;
/**
* [Output Only] Creation timestamp in RFC3339 text format.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String creationTimestamp;
/**
* [Output Only] IPv4 address + prefix length to be configured on the customer router subinterface
* for this interconnect attachment.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String customerRouterIpAddress;
/**
* An optional description of this resource.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String description;
/**
* Desired availability domain for the attachment. Only available for type PARTNER, at creation
* time, and can take one of the following values: - AVAILABILITY_DOMAIN_ANY -
* AVAILABILITY_DOMAIN_1 - AVAILABILITY_DOMAIN_2 For improved reliability, customers should
* configure a pair of attachments, one per availability domain. The selected availability domain
* will be provided to the Partner via the pairing key, so that the provisioned circuit will lie
* in the specified domain. If not specified, the value will default to AVAILABILITY_DOMAIN_ANY.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String edgeAvailabilityDomain;
/**
* [Output Only] Google reference ID, to be used when raising support tickets with Google or
* otherwise to debug backend connectivity issues. [Deprecated] This field is not used.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String googleReferenceId;
/**
* [Output Only] The unique identifier for the resource. This identifier is defined by the server.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key @com.google.api.client.json.JsonString
private java.math.BigInteger id;
/**
* URL of the underlying Interconnect object that this attachment's traffic will traverse through.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String interconnect;
/**
* [Output Only] Type of the resource. Always compute#interconnectAttachment for interconnect
* attachments.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String kind;
/**
* A fingerprint for the labels being applied to this InterconnectAttachment, which is essentially
* a hash of the labels set used for optimistic locking. The fingerprint is initially generated by
* Compute Engine and changes after every request to modify or update labels. You must always
* provide an up-to-date fingerprint hash in order to update or change labels, otherwise the
* request will fail with error 412 conditionNotMet.
*
* To see the latest fingerprint, make a get() request to retrieve an InterconnectAttachment.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String labelFingerprint;
/**
* Labels to apply to this InterconnectAttachment resource. These can be later modified by the
* setLabels method. Each label key/value must comply with RFC1035. Label values may be empty.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.Map<String, java.lang.String> labels;
/**
* Name of the resource. Provided by the client when the resource is created. The name must be
* 1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63 characters
* long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first
* character must be a lowercase letter, and all following characters must be a dash, lowercase
* letter, or digit, except the last character, which cannot be a dash.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String name;
/**
* [Output Only] The current status of whether or not this interconnect attachment is functional,
* which can take one of the following values: - OS_ACTIVE: The attachment has been turned up and
* is ready to use. - OS_UNPROVISIONED: The attachment is not ready to use yet, because turnup is
* not complete.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String operationalStatus;
/**
* [Output only for type PARTNER. Input only for PARTNER_PROVIDER. Not present for DEDICATED]. The
* opaque identifier of a PARTNER attachment used to initiate provisioning with a selected
* partner. Of the form "XXXXX/region/domain"
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String pairingKey;
/**
* Optional BGP ASN for the router supplied by a Layer 3 Partner if they configured BGP on behalf
* of the customer. Output only for PARTNER type, input only for PARTNER_PROVIDER, not available
* for DEDICATED.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key @com.google.api.client.json.JsonString
private java.lang.Long partnerAsn;
/**
* Informational metadata about Partner attachments from Partners to display to customers. Output
* only for PARTNER type, mutable for PARTNER_PROVIDER, not available for DEDICATED.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private InterconnectAttachmentPartnerMetadata partnerMetadata;
/**
* [Output Only] Information specific to an InterconnectAttachment. This property is populated if
* the interconnect that this is attached to is of type DEDICATED.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private InterconnectAttachmentPrivateInfo privateInterconnectInfo;
/**
* [Output Only] URL of the region where the regional interconnect attachment resides. You must
* specify this field as part of the HTTP request URL. It is not settable as a field in the
* request body.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String region;
/**
* URL of the Cloud Router to be used for dynamic routing. This router must be in the same region
* as this InterconnectAttachment. The InterconnectAttachment will automatically connect the
* Interconnect to the network & region within which the Cloud Router is configured.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String router;
/**
* [Output Only] Server-defined URL for the resource.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String selfLink;
/**
* [Output Only] The current state of this attachment's functionality. Enum values ACTIVE and
* UNPROVISIONED are shared by DEDICATED/PRIVATE, PARTNER, and PARTNER_PROVIDER interconnect
* attachments, while enum values PENDING_PARTNER, PARTNER_REQUEST_RECEIVED, and PENDING_CUSTOMER
* are used for only PARTNER and PARTNER_PROVIDER interconnect attachments. This state can take
* one of the following values: - ACTIVE: The attachment has been turned up and is ready to use.
* - UNPROVISIONED: The attachment is not ready to use yet, because turnup is not complete. -
* PENDING_PARTNER: A newly-created PARTNER attachment that has not yet been configured on the
* Partner side. - PARTNER_REQUEST_RECEIVED: A PARTNER attachment is in the process of
* provisioning after a PARTNER_PROVIDER attachment was created that references it. -
* PENDING_CUSTOMER: A PARTNER or PARTNER_PROVIDER attachment that is waiting for a customer to
* activate it. - DEFUNCT: The attachment was deleted externally and is no longer functional.
* This could be because the associated Interconnect was removed, or because the other side of a
* Partner attachment was deleted.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String state;
/**
* The type of interconnect attachment this is, which can take one of the following values: -
* DEDICATED: an attachment to a Dedicated Interconnect. - PARTNER: an attachment to a Partner
* Interconnect, created by the customer. - PARTNER_PROVIDER: an attachment to a Partner
* Interconnect, created by the partner.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String type;
/**
* The IEEE 802.1Q VLAN tag for this attachment, in the range 2-4094. Only specified at creation
* time.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Integer vlanTag8021q;
/**
* Determines whether this Attachment will carry packets. Not present for PARTNER_PROVIDER.
* @return value or {@code null} for none
*/
public java.lang.Boolean getAdminEnabled() {
return adminEnabled;
}
/**
* Determines whether this Attachment will carry packets. Not present for PARTNER_PROVIDER.
* @param adminEnabled adminEnabled or {@code null} for none
*/
public InterconnectAttachment setAdminEnabled(java.lang.Boolean adminEnabled) {
this.adminEnabled = adminEnabled;
return this;
}
/**
* Provisioned bandwidth capacity for the interconnect attachment. For attachments of type
* DEDICATED, the user can set the bandwidth. For attachments of type PARTNER, the Google Partner
* that is operating the interconnect must set the bandwidth. Output only for PARTNER type,
* mutable for PARTNER_PROVIDER and DEDICATED, and can take one of the following values: -
* BPS_50M: 50 Mbit/s - BPS_100M: 100 Mbit/s - BPS_200M: 200 Mbit/s - BPS_300M: 300 Mbit/s -
* BPS_400M: 400 Mbit/s - BPS_500M: 500 Mbit/s - BPS_1G: 1 Gbit/s - BPS_2G: 2 Gbit/s - BPS_5G:
* 5 Gbit/s - BPS_10G: 10 Gbit/s - BPS_20G: 20 Gbit/s - BPS_50G: 50 Gbit/s
* @return value or {@code null} for none
*/
public java.lang.String getBandwidth() {
return bandwidth;
}
/**
* Provisioned bandwidth capacity for the interconnect attachment. For attachments of type
* DEDICATED, the user can set the bandwidth. For attachments of type PARTNER, the Google Partner
* that is operating the interconnect must set the bandwidth. Output only for PARTNER type,
* mutable for PARTNER_PROVIDER and DEDICATED, and can take one of the following values: -
* BPS_50M: 50 Mbit/s - BPS_100M: 100 Mbit/s - BPS_200M: 200 Mbit/s - BPS_300M: 300 Mbit/s -
* BPS_400M: 400 Mbit/s - BPS_500M: 500 Mbit/s - BPS_1G: 1 Gbit/s - BPS_2G: 2 Gbit/s - BPS_5G:
* 5 Gbit/s - BPS_10G: 10 Gbit/s - BPS_20G: 20 Gbit/s - BPS_50G: 50 Gbit/s
* @param bandwidth bandwidth or {@code null} for none
*/
public InterconnectAttachment setBandwidth(java.lang.String bandwidth) {
this.bandwidth = bandwidth;
return this;
}
/**
* Up to 16 candidate prefixes that can be used to restrict the allocation of cloudRouterIpAddress
* and customerRouterIpAddress for this attachment. All prefixes must be within link-local address
* space (169.254.0.0/16) and must be /29 or shorter (/28, /27, etc). Google will attempt to
* select an unused /29 from the supplied candidate prefix(es). The request will fail if all
* possible /29s are in use on Google's edge. If not supplied, Google will randomly select an
* unused /29 from all of link-local space.
* @return value or {@code null} for none
*/
public java.util.List<java.lang.String> getCandidateSubnets() {
return candidateSubnets;
}
/**
* Up to 16 candidate prefixes that can be used to restrict the allocation of cloudRouterIpAddress
* and customerRouterIpAddress for this attachment. All prefixes must be within link-local address
* space (169.254.0.0/16) and must be /29 or shorter (/28, /27, etc). Google will attempt to
* select an unused /29 from the supplied candidate prefix(es). The request will fail if all
* possible /29s are in use on Google's edge. If not supplied, Google will randomly select an
* unused /29 from all of link-local space.
* @param candidateSubnets candidateSubnets or {@code null} for none
*/
public InterconnectAttachment setCandidateSubnets(java.util.List<java.lang.String> candidateSubnets) {
this.candidateSubnets = candidateSubnets;
return this;
}
/**
* [Output Only] IPv4 address + prefix length to be configured on Cloud Router Interface for this
* interconnect attachment.
* @return value or {@code null} for none
*/
public java.lang.String getCloudRouterIpAddress() {
return cloudRouterIpAddress;
}
/**
* [Output Only] IPv4 address + prefix length to be configured on Cloud Router Interface for this
* interconnect attachment.
* @param cloudRouterIpAddress cloudRouterIpAddress or {@code null} for none
*/
public InterconnectAttachment setCloudRouterIpAddress(java.lang.String cloudRouterIpAddress) {
this.cloudRouterIpAddress = cloudRouterIpAddress;
return this;
}
/**
* [Output Only] Creation timestamp in RFC3339 text format.
* @return value or {@code null} for none
*/
public java.lang.String getCreationTimestamp() {
return creationTimestamp;
}
/**
* [Output Only] Creation timestamp in RFC3339 text format.
* @param creationTimestamp creationTimestamp or {@code null} for none
*/
public InterconnectAttachment setCreationTimestamp(java.lang.String creationTimestamp) {
this.creationTimestamp = creationTimestamp;
return this;
}
/**
* [Output Only] IPv4 address + prefix length to be configured on the customer router subinterface
* for this interconnect attachment.
* @return value or {@code null} for none
*/
public java.lang.String getCustomerRouterIpAddress() {
return customerRouterIpAddress;
}
/**
* [Output Only] IPv4 address + prefix length to be configured on the customer router subinterface
* for this interconnect attachment.
* @param customerRouterIpAddress customerRouterIpAddress or {@code null} for none
*/
public InterconnectAttachment setCustomerRouterIpAddress(java.lang.String customerRouterIpAddress) {
this.customerRouterIpAddress = customerRouterIpAddress;
return this;
}
/**
* An optional description of this resource.
* @return value or {@code null} for none
*/
public java.lang.String getDescription() {
return description;
}
/**
* An optional description of this resource.
* @param description description or {@code null} for none
*/
public InterconnectAttachment setDescription(java.lang.String description) {
this.description = description;
return this;
}
/**
* Desired availability domain for the attachment. Only available for type PARTNER, at creation
* time, and can take one of the following values: - AVAILABILITY_DOMAIN_ANY -
* AVAILABILITY_DOMAIN_1 - AVAILABILITY_DOMAIN_2 For improved reliability, customers should
* configure a pair of attachments, one per availability domain. The selected availability domain
* will be provided to the Partner via the pairing key, so that the provisioned circuit will lie
* in the specified domain. If not specified, the value will default to AVAILABILITY_DOMAIN_ANY.
* @return value or {@code null} for none
*/
public java.lang.String getEdgeAvailabilityDomain() {
return edgeAvailabilityDomain;
}
/**
* Desired availability domain for the attachment. Only available for type PARTNER, at creation
* time, and can take one of the following values: - AVAILABILITY_DOMAIN_ANY -
* AVAILABILITY_DOMAIN_1 - AVAILABILITY_DOMAIN_2 For improved reliability, customers should
* configure a pair of attachments, one per availability domain. The selected availability domain
* will be provided to the Partner via the pairing key, so that the provisioned circuit will lie
* in the specified domain. If not specified, the value will default to AVAILABILITY_DOMAIN_ANY.
* @param edgeAvailabilityDomain edgeAvailabilityDomain or {@code null} for none
*/
public InterconnectAttachment setEdgeAvailabilityDomain(java.lang.String edgeAvailabilityDomain) {
this.edgeAvailabilityDomain = edgeAvailabilityDomain;
return this;
}
/**
* [Output Only] Google reference ID, to be used when raising support tickets with Google or
* otherwise to debug backend connectivity issues. [Deprecated] This field is not used.
* @return value or {@code null} for none
*/
public java.lang.String getGoogleReferenceId() {
return googleReferenceId;
}
/**
* [Output Only] Google reference ID, to be used when raising support tickets with Google or
* otherwise to debug backend connectivity issues. [Deprecated] This field is not used.
* @param googleReferenceId googleReferenceId or {@code null} for none
*/
public InterconnectAttachment setGoogleReferenceId(java.lang.String googleReferenceId) {
this.googleReferenceId = googleReferenceId;
return this;
}
/**
* [Output Only] The unique identifier for the resource. This identifier is defined by the server.
* @return value or {@code null} for none
*/
public java.math.BigInteger getId() {
return id;
}
/**
* [Output Only] The unique identifier for the resource. This identifier is defined by the server.
* @param id id or {@code null} for none
*/
public InterconnectAttachment setId(java.math.BigInteger id) {
this.id = id;
return this;
}
/**
* URL of the underlying Interconnect object that this attachment's traffic will traverse through.
* @return value or {@code null} for none
*/
public java.lang.String getInterconnect() {
return interconnect;
}
/**
* URL of the underlying Interconnect object that this attachment's traffic will traverse through.
* @param interconnect interconnect or {@code null} for none
*/
public InterconnectAttachment setInterconnect(java.lang.String interconnect) {
this.interconnect = interconnect;
return this;
}
/**
* [Output Only] Type of the resource. Always compute#interconnectAttachment for interconnect
* attachments.
* @return value or {@code null} for none
*/
public java.lang.String getKind() {
return kind;
}
/**
* [Output Only] Type of the resource. Always compute#interconnectAttachment for interconnect
* attachments.
* @param kind kind or {@code null} for none
*/
public InterconnectAttachment setKind(java.lang.String kind) {
this.kind = kind;
return this;
}
/**
* A fingerprint for the labels being applied to this InterconnectAttachment, which is essentially
* a hash of the labels set used for optimistic locking. The fingerprint is initially generated by
* Compute Engine and changes after every request to modify or update labels. You must always
* provide an up-to-date fingerprint hash in order to update or change labels, otherwise the
* request will fail with error 412 conditionNotMet.
*
* To see the latest fingerprint, make a get() request to retrieve an InterconnectAttachment.
* @see #decodeLabelFingerprint()
* @return value or {@code null} for none
*/
public java.lang.String getLabelFingerprint() {
return labelFingerprint;
}
/**
 * A fingerprint for the labels being applied to this InterconnectAttachment, which is essentially
 * a hash of the labels set used for optimistic locking. The fingerprint is initially generated by
 * Compute Engine and changes after every request to modify or update labels. You must always
 * provide an up-to-date fingerprint hash in order to update or change labels, otherwise the
 * request will fail with error 412 conditionNotMet.
 *
 * To see the latest fingerprint, make a get() request to retrieve an InterconnectAttachment.
 * @see #getLabelFingerprint()
 * @return Base64 decoded value or {@code null} for none
 *
 * @since 1.14
 */
public byte[] decodeLabelFingerprint() {
  String encoded = labelFingerprint;
  return com.google.api.client.util.Base64.decodeBase64(encoded);
}
/**
* A fingerprint for the labels being applied to this InterconnectAttachment, which is essentially
* a hash of the labels set used for optimistic locking. The fingerprint is initially generated by
* Compute Engine and changes after every request to modify or update labels. You must always
* provide an up-to-date fingerprint hash in order to update or change labels, otherwise the
* request will fail with error 412 conditionNotMet.
*
* To see the latest fingerprint, make a get() request to retrieve an InterconnectAttachment.
* @see #encodeLabelFingerprint()
* @param labelFingerprint labelFingerprint or {@code null} for none
*/
public InterconnectAttachment setLabelFingerprint(java.lang.String labelFingerprint) {
this.labelFingerprint = labelFingerprint;
return this;
}
/**
 * A fingerprint for the labels being applied to this InterconnectAttachment, which is essentially
 * a hash of the labels set used for optimistic locking. The fingerprint is initially generated by
 * Compute Engine and changes after every request to modify or update labels. You must always
 * provide an up-to-date fingerprint hash in order to update or change labels, otherwise the
 * request will fail with error 412 conditionNotMet.
 *
 * To see the latest fingerprint, make a get() request to retrieve an InterconnectAttachment.
 * @see #setLabelFingerprint(java.lang.String)
 *
 * <p>
 * The value is encoded Base64 or {@code null} for none.
 * </p>
 *
 * @param labelFingerprint raw (unencoded) fingerprint bytes, or {@code null} for none
 * @return this object, for method chaining
 * @since 1.14
 */
public InterconnectAttachment encodeLabelFingerprint(byte[] labelFingerprint) {
  this.labelFingerprint = com.google.api.client.util.Base64.encodeBase64URLSafeString(labelFingerprint);
  return this;
}
/**
* Labels to apply to this InterconnectAttachment resource. These can be later modified by the
* setLabels method. Each label key/value must comply with RFC1035. Label values may be empty.
* @return value or {@code null} for none
*/
/**
 * Returns the labels applied to this InterconnectAttachment resource. Labels can be modified
 * later via the setLabels method; each label key/value must comply with RFC1035, and label
 * values may be empty.
 * @return value or {@code null} for none
 */
public java.util.Map<String, java.lang.String> getLabels() {
  return this.labels;
}

/**
 * Replaces the labels applied to this InterconnectAttachment resource. Each label key/value
 * must comply with RFC1035; label values may be empty.
 * @param labels labels or {@code null} for none
 * @return this object, to allow call chaining
 */
public InterconnectAttachment setLabels(java.util.Map<String, java.lang.String> labels) {
  this.labels = labels;
  return this;
}

/**
 * Returns the name of the resource, supplied by the client at creation time. The name must be
 * 1-63 characters long, comply with RFC1035, and match the regular expression
 * `[a-z]([-a-z0-9]*[a-z0-9])?`: a lowercase letter first, followed by dashes, lowercase
 * letters, or digits, with no trailing dash.
 * @return value or {@code null} for none
 */
public java.lang.String getName() {
  return this.name;
}

/**
 * Sets the name of the resource. See {@link #getName()} for the RFC1035 naming constraints.
 * @param name name or {@code null} for none
 * @return this object, to allow call chaining
 */
public InterconnectAttachment setName(java.lang.String name) {
  this.name = name;
  return this;
}

/**
 * Returns [Output Only] the current status of whether or not this interconnect attachment is
 * functional. One of:
 * - OS_ACTIVE: the attachment has been turned up and is ready to use.
 * - OS_UNPROVISIONED: the attachment is not ready to use yet, because turnup is not complete.
 * @return value or {@code null} for none
 */
public java.lang.String getOperationalStatus() {
  return this.operationalStatus;
}

/**
 * Sets the operational status. [Output Only]; see {@link #getOperationalStatus()} for the
 * possible values.
 * @param operationalStatus operationalStatus or {@code null} for none
 * @return this object, to allow call chaining
 */
public InterconnectAttachment setOperationalStatus(java.lang.String operationalStatus) {
  this.operationalStatus = operationalStatus;
  return this;
}

/**
 * Returns the opaque identifier of a PARTNER attachment, of the form "XXXXX/region/domain",
 * used to initiate provisioning with a selected partner. [Output only for type PARTNER. Input
 * only for PARTNER_PROVIDER. Not present for DEDICATED.]
 * @return value or {@code null} for none
 */
public java.lang.String getPairingKey() {
  return this.pairingKey;
}

/**
 * Sets the pairing key; see {@link #getPairingKey()} for format and per-type applicability.
 * @param pairingKey pairingKey or {@code null} for none
 * @return this object, to allow call chaining
 */
public InterconnectAttachment setPairingKey(java.lang.String pairingKey) {
  this.pairingKey = pairingKey;
  return this;
}
/**
 * Returns the optional BGP ASN for the router supplied by a Layer 3 Partner if they configured
 * BGP on behalf of the customer. Output only for PARTNER type, input only for
 * PARTNER_PROVIDER, not available for DEDICATED.
 * @return value or {@code null} for none
 */
public java.lang.Long getPartnerAsn() {
  return this.partnerAsn;
}

/**
 * Sets the partner BGP ASN; see {@link #getPartnerAsn()} for per-type applicability.
 * @param partnerAsn partnerAsn or {@code null} for none
 * @return this object, to allow call chaining
 */
public InterconnectAttachment setPartnerAsn(java.lang.Long partnerAsn) {
  this.partnerAsn = partnerAsn;
  return this;
}

/**
 * Returns informational metadata about Partner attachments from Partners to display to
 * customers. Output only for PARTNER type, mutable for PARTNER_PROVIDER, not available for
 * DEDICATED.
 * @return value or {@code null} for none
 */
public InterconnectAttachmentPartnerMetadata getPartnerMetadata() {
  return this.partnerMetadata;
}

/**
 * Sets the partner metadata; see {@link #getPartnerMetadata()} for per-type applicability.
 * @param partnerMetadata partnerMetadata or {@code null} for none
 * @return this object, to allow call chaining
 */
public InterconnectAttachment setPartnerMetadata(InterconnectAttachmentPartnerMetadata partnerMetadata) {
  this.partnerMetadata = partnerMetadata;
  return this;
}

/**
 * Returns [Output Only] information specific to an InterconnectAttachment. This property is
 * populated if the interconnect that this is attached to is of type DEDICATED.
 * @return value or {@code null} for none
 */
public InterconnectAttachmentPrivateInfo getPrivateInterconnectInfo() {
  return this.privateInterconnectInfo;
}

/**
 * Sets the private interconnect info. [Output Only]; populated when the underlying
 * interconnect is of type DEDICATED.
 * @param privateInterconnectInfo privateInterconnectInfo or {@code null} for none
 * @return this object, to allow call chaining
 */
public InterconnectAttachment setPrivateInterconnectInfo(InterconnectAttachmentPrivateInfo privateInterconnectInfo) {
  this.privateInterconnectInfo = privateInterconnectInfo;
  return this;
}
/**
 * Returns [Output Only] the URL of the region where the regional interconnect attachment
 * resides. You must specify this field as part of the HTTP request URL; it is not settable as
 * a field in the request body.
 * @return value or {@code null} for none
 */
public java.lang.String getRegion() {
  return this.region;
}

/**
 * Sets the region URL. [Output Only]; supplied via the HTTP request URL, not the request body.
 * @param region region or {@code null} for none
 * @return this object, to allow call chaining
 */
public InterconnectAttachment setRegion(java.lang.String region) {
  this.region = region;
  return this;
}

/**
 * Returns the URL of the Cloud Router to be used for dynamic routing. This router must be in
 * the same region as this InterconnectAttachment. The InterconnectAttachment will
 * automatically connect the Interconnect to the network and region within which the Cloud
 * Router is configured.
 * @return value or {@code null} for none
 */
public java.lang.String getRouter() {
  return this.router;
}

/**
 * Sets the Cloud Router URL; see {@link #getRouter()} for the same-region requirement.
 * @param router router or {@code null} for none
 * @return this object, to allow call chaining
 */
public InterconnectAttachment setRouter(java.lang.String router) {
  this.router = router;
  return this;
}

/**
 * Returns [Output Only] the server-defined URL for the resource.
 * @return value or {@code null} for none
 */
public java.lang.String getSelfLink() {
  return this.selfLink;
}

/**
 * Sets the server-defined URL for the resource. [Output Only].
 * @param selfLink selfLink or {@code null} for none
 * @return this object, to allow call chaining
 */
public InterconnectAttachment setSelfLink(java.lang.String selfLink) {
  this.selfLink = selfLink;
  return this;
}

/**
 * Returns [Output Only] the current state of this attachment's functionality. ACTIVE and
 * UNPROVISIONED are shared by DEDICATED/PRIVATE, PARTNER, and PARTNER_PROVIDER attachments;
 * PENDING_PARTNER, PARTNER_REQUEST_RECEIVED, and PENDING_CUSTOMER apply only to PARTNER and
 * PARTNER_PROVIDER attachments. Possible values:
 * - ACTIVE: the attachment has been turned up and is ready to use.
 * - UNPROVISIONED: the attachment is not ready to use yet, because turnup is not complete.
 * - PENDING_PARTNER: a newly-created PARTNER attachment not yet configured on the Partner side.
 * - PARTNER_REQUEST_RECEIVED: a PARTNER attachment being provisioned after a PARTNER_PROVIDER
 *   attachment was created that references it.
 * - PENDING_CUSTOMER: a PARTNER or PARTNER_PROVIDER attachment waiting for a customer to
 *   activate it.
 * - DEFUNCT: the attachment was deleted externally and is no longer functional, e.g. because
 *   the associated Interconnect was removed or the other side of a Partner attachment was
 *   deleted.
 * @return value or {@code null} for none
 */
public java.lang.String getState() {
  return this.state;
}

/**
 * Sets the attachment state. [Output Only]; see {@link #getState()} for the possible values.
 * @param state state or {@code null} for none
 * @return this object, to allow call chaining
 */
public InterconnectAttachment setState(java.lang.String state) {
  this.state = state;
  return this;
}
/**
 * Returns the type of interconnect attachment this is. One of:
 * - DEDICATED: an attachment to a Dedicated Interconnect.
 * - PARTNER: an attachment to a Partner Interconnect, created by the customer.
 * - PARTNER_PROVIDER: an attachment to a Partner Interconnect, created by the partner.
 * @return value or {@code null} for none
 */
public java.lang.String getType() {
  return this.type;
}

/**
 * Sets the attachment type; see {@link #getType()} for the possible values.
 * @param type type or {@code null} for none
 * @return this object, to allow call chaining
 */
public InterconnectAttachment setType(java.lang.String type) {
  this.type = type;
  return this;
}

/**
 * Returns the IEEE 802.1Q VLAN tag for this attachment, in the range 2-4094. Only specified at
 * creation time.
 * @return value or {@code null} for none
 */
public java.lang.Integer getVlanTag8021q() {
  return this.vlanTag8021q;
}

/**
 * Sets the IEEE 802.1Q VLAN tag (range 2-4094); only specified at creation time.
 * @param vlanTag8021q vlanTag8021q or {@code null} for none
 * @return this object, to allow call chaining
 */
public InterconnectAttachment setVlanTag8021q(java.lang.Integer vlanTag8021q) {
  this.vlanTag8021q = vlanTag8021q;
  return this;
}
/**
 * Sets the named JSON field via the GenericJson map, narrowing the fluent return type to
 * InterconnectAttachment so call chains keep the concrete type.
 * @param fieldName name of the JSON field to set
 * @param value value to associate with the field, or {@code null}
 * @return this object, to allow call chaining
 */
@Override
public InterconnectAttachment set(String fieldName, Object value) {
return (InterconnectAttachment) super.set(fieldName, value);
}
/**
 * Returns a copy of this model object, narrowing the return type of the inherited clone to
 * InterconnectAttachment.
 * @return a cloned InterconnectAttachment
 */
@Override
public InterconnectAttachment clone() {
return (InterconnectAttachment) super.clone();
}
}
/*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.compute.model;
/**
* Represents an Interconnect Attachment (VLAN) resource.
*
* You can use Interconnect attachments (VLANS) to connect your Virtual Private Cloud networks to
* your on-premises networks through an Interconnect. For more information, read Creating VLAN
* Attachments. (== resource_for beta.interconnectAttachments ==) (== resource_for
* v1.interconnectAttachments ==)
*
* <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
* transmitted over HTTP when working with the Compute Engine API. For a detailed explanation see:
* <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
* </p>
*
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public final class InterconnectAttachment extends com.google.api.client.json.GenericJson {
/**
* Determines whether this Attachment will carry packets. Not present for PARTNER_PROVIDER.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean adminEnabled;
/**
* Provisioned bandwidth capacity for the interconnect attachment. For attachments of type
* DEDICATED, the user can set the bandwidth. For attachments of type PARTNER, the Google Partner
* that is operating the interconnect must set the bandwidth. Output only for PARTNER type,
* mutable for PARTNER_PROVIDER and DEDICATED, and can take one of the following values: -
* BPS_50M: 50 Mbit/s - BPS_100M: 100 Mbit/s - BPS_200M: 200 Mbit/s - BPS_300M: 300 Mbit/s -
* BPS_400M: 400 Mbit/s - BPS_500M: 500 Mbit/s - BPS_1G: 1 Gbit/s - BPS_2G: 2 Gbit/s - BPS_5G:
* 5 Gbit/s - BPS_10G: 10 Gbit/s - BPS_20G: 20 Gbit/s - BPS_50G: 50 Gbit/s
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String bandwidth;
/**
* Up to 16 candidate prefixes that can be used to restrict the allocation of cloudRouterIpAddress
* and customerRouterIpAddress for this attachment. All prefixes must be within link-local address
* space (169.254.0.0/16) and must be /29 or shorter (/28, /27, etc). Google will attempt to
* select an unused /29 from the supplied candidate prefix(es). The request will fail if all
 * possible /29s are in use on Google's edge. If not supplied, Google will randomly select an
* unused /29 from all of link-local space.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.List<java.lang.String> candidateSubnets;
/**
* [Output Only] IPv4 address + prefix length to be configured on Cloud Router Interface for this
* interconnect attachment.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String cloudRouterIpAddress;
/**
* [Output Only] Creation timestamp in RFC3339 text format.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String creationTimestamp;
/**
* [Output Only] IPv4 address + prefix length to be configured on the customer router subinterface
* for this interconnect attachment.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String customerRouterIpAddress;
/**
* An optional description of this resource.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String description;
/**
* Desired availability domain for the attachment. Only available for type PARTNER, at creation
* time, and can take one of the following values: - AVAILABILITY_DOMAIN_ANY -
* AVAILABILITY_DOMAIN_1 - AVAILABILITY_DOMAIN_2 For improved reliability, customers should
* configure a pair of attachments, one per availability domain. The selected availability domain
* will be provided to the Partner via the pairing key, so that the provisioned circuit will lie
* in the specified domain. If not specified, the value will default to AVAILABILITY_DOMAIN_ANY.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String edgeAvailabilityDomain;
/**
* [Output Only] Google reference ID, to be used when raising support tickets with Google or
* otherwise to debug backend connectivity issues. [Deprecated] This field is not used.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String googleReferenceId;
/**
* [Output Only] The unique identifier for the resource. This identifier is defined by the server.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key @com.google.api.client.json.JsonString
private java.math.BigInteger id;
/**
* URL of the underlying Interconnect object that this attachment's traffic will traverse through.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String interconnect;
/**
* [Output Only] Type of the resource. Always compute#interconnectAttachment for interconnect
* attachments.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String kind;
/**
* A fingerprint for the labels being applied to this InterconnectAttachment, which is essentially
* a hash of the labels set used for optimistic locking. The fingerprint is initially generated by
* Compute Engine and changes after every request to modify or update labels. You must always
* provide an up-to-date fingerprint hash in order to update or change labels, otherwise the
* request will fail with error 412 conditionNotMet.
*
* To see the latest fingerprint, make a get() request to retrieve an InterconnectAttachment.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String labelFingerprint;
/**
* Labels to apply to this InterconnectAttachment resource. These can be later modified by the
* setLabels method. Each label key/value must comply with RFC1035. Label values may be empty.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.Map<String, java.lang.String> labels;
/**
* Name of the resource. Provided by the client when the resource is created. The name must be
* 1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63 characters
* long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first
* character must be a lowercase letter, and all following characters must be a dash, lowercase
* letter, or digit, except the last character, which cannot be a dash.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String name;
/**
* [Output Only] The current status of whether or not this interconnect attachment is functional,
* which can take one of the following values: - OS_ACTIVE: The attachment has been turned up and
* is ready to use. - OS_UNPROVISIONED: The attachment is not ready to use yet, because turnup is
* not complete.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String operationalStatus;
/**
* [Output only for type PARTNER. Input only for PARTNER_PROVIDER. Not present for DEDICATED]. The
* opaque identifier of an PARTNER attachment used to initiate provisioning with a selected
* partner. Of the form "XXXXX/region/domain"
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String pairingKey;
/**
* Optional BGP ASN for the router supplied by a Layer 3 Partner if they configured BGP on behalf
* of the customer. Output only for PARTNER type, input only for PARTNER_PROVIDER, not available
* for DEDICATED.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key @com.google.api.client.json.JsonString
private java.lang.Long partnerAsn;
/**
* Informational metadata about Partner attachments from Partners to display to customers. Output
 * only for PARTNER type, mutable for PARTNER_PROVIDER, not available for DEDICATED.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private InterconnectAttachmentPartnerMetadata partnerMetadata;
/**
* [Output Only] Information specific to an InterconnectAttachment. This property is populated if
* the interconnect that this is attached to is of type DEDICATED.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private InterconnectAttachmentPrivateInfo privateInterconnectInfo;
/**
* [Output Only] URL of the region where the regional interconnect attachment resides. You must
* specify this field as part of the HTTP request URL. It is not settable as a field in the
* request body.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String region;
/**
* URL of the Cloud Router to be used for dynamic routing. This router must be in the same region
* as this InterconnectAttachment. The InterconnectAttachment will automatically connect the
* Interconnect to the network & region within which the Cloud Router is configured.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String router;
/**
* [Output Only] Server-defined URL for the resource.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String selfLink;
/**
* [Output Only] The current state of this attachment's functionality. Enum values ACTIVE and
* UNPROVISIONED are shared by DEDICATED/PRIVATE, PARTNER, and PARTNER_PROVIDER interconnect
* attachments, while enum values PENDING_PARTNER, PARTNER_REQUEST_RECEIVED, and PENDING_CUSTOMER
* are used for only PARTNER and PARTNER_PROVIDER interconnect attachments. This state can take
* one of the following values: - ACTIVE: The attachment has been turned up and is ready to use.
* - UNPROVISIONED: The attachment is not ready to use yet, because turnup is not complete. -
* PENDING_PARTNER: A newly-created PARTNER attachment that has not yet been configured on the
* Partner side. - PARTNER_REQUEST_RECEIVED: A PARTNER attachment is in the process of
* provisioning after a PARTNER_PROVIDER attachment was created that references it. -
* PENDING_CUSTOMER: A PARTNER or PARTNER_PROVIDER attachment that is waiting for a customer to
* activate it. - DEFUNCT: The attachment was deleted externally and is no longer functional.
* This could be because the associated Interconnect was removed, or because the other side of a
* Partner attachment was deleted.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String state;
/**
* The type of interconnect attachment this is, which can take one of the following values: -
* DEDICATED: an attachment to a Dedicated Interconnect. - PARTNER: an attachment to a Partner
* Interconnect, created by the customer. - PARTNER_PROVIDER: an attachment to a Partner
* Interconnect, created by the partner.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String type;
/**
* The IEEE 802.1Q VLAN tag for this attachment, in the range 2-4094. Only specified at creation
* time.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Integer vlanTag8021q;
/**
 * Returns whether this Attachment will carry packets. Not present for PARTNER_PROVIDER.
 * @return value or {@code null} for none
 */
public java.lang.Boolean getAdminEnabled() {
  return this.adminEnabled;
}

/**
 * Sets whether this Attachment will carry packets. Not present for PARTNER_PROVIDER.
 * @param adminEnabled adminEnabled or {@code null} for none
 * @return this object, to allow call chaining
 */
public InterconnectAttachment setAdminEnabled(java.lang.Boolean adminEnabled) {
  this.adminEnabled = adminEnabled;
  return this;
}

/**
 * Returns the provisioned bandwidth capacity for the interconnect attachment. For attachments
 * of type DEDICATED, the user can set the bandwidth; for attachments of type PARTNER, the
 * Google Partner that is operating the interconnect must set it. Output only for PARTNER type,
 * mutable for PARTNER_PROVIDER and DEDICATED. One of: BPS_50M (50 Mbit/s), BPS_100M, BPS_200M,
 * BPS_300M, BPS_400M, BPS_500M, BPS_1G (1 Gbit/s), BPS_2G, BPS_5G, BPS_10G, BPS_20G, BPS_50G
 * (50 Gbit/s).
 * @return value or {@code null} for none
 */
public java.lang.String getBandwidth() {
  return this.bandwidth;
}

/**
 * Sets the provisioned bandwidth capacity; see {@link #getBandwidth()} for the accepted BPS_*
 * values and per-type mutability.
 * @param bandwidth bandwidth or {@code null} for none
 * @return this object, to allow call chaining
 */
public InterconnectAttachment setBandwidth(java.lang.String bandwidth) {
  this.bandwidth = bandwidth;
  return this;
}

/**
 * Returns up to 16 candidate prefixes that can be used to restrict the allocation of
 * cloudRouterIpAddress and customerRouterIpAddress for this attachment. All prefixes must be
 * within link-local address space (169.254.0.0/16) and must be /29 or shorter (/28, /27, etc).
 * Google will attempt to select an unused /29 from the supplied candidate prefix(es); the
 * request will fail if all possible /29s are in use on Google's edge. If not supplied, Google
 * will randomly select an unused /29 from all of link-local space.
 * @return value or {@code null} for none
 */
public java.util.List<java.lang.String> getCandidateSubnets() {
  return this.candidateSubnets;
}

/**
 * Sets the candidate prefixes; see {@link #getCandidateSubnets()} for the constraints and
 * allocation behavior.
 * @param candidateSubnets candidateSubnets or {@code null} for none
 * @return this object, to allow call chaining
 */
public InterconnectAttachment setCandidateSubnets(java.util.List<java.lang.String> candidateSubnets) {
  this.candidateSubnets = candidateSubnets;
  return this;
}
/**
 * Returns [Output Only] the IPv4 address + prefix length to be configured on the Cloud Router
 * Interface for this interconnect attachment.
 * @return value or {@code null} for none
 */
public java.lang.String getCloudRouterIpAddress() {
  return this.cloudRouterIpAddress;
}

/**
 * Sets the Cloud Router interface IPv4 address + prefix length. [Output Only].
 * @param cloudRouterIpAddress cloudRouterIpAddress or {@code null} for none
 * @return this object, to allow call chaining
 */
public InterconnectAttachment setCloudRouterIpAddress(java.lang.String cloudRouterIpAddress) {
  this.cloudRouterIpAddress = cloudRouterIpAddress;
  return this;
}

/**
 * Returns [Output Only] the creation timestamp in RFC3339 text format.
 * @return value or {@code null} for none
 */
public java.lang.String getCreationTimestamp() {
  return this.creationTimestamp;
}

/**
 * Sets the creation timestamp (RFC3339 text format). [Output Only].
 * @param creationTimestamp creationTimestamp or {@code null} for none
 * @return this object, to allow call chaining
 */
public InterconnectAttachment setCreationTimestamp(java.lang.String creationTimestamp) {
  this.creationTimestamp = creationTimestamp;
  return this;
}

/**
 * Returns [Output Only] the IPv4 address + prefix length to be configured on the customer
 * router subinterface for this interconnect attachment.
 * @return value or {@code null} for none
 */
public java.lang.String getCustomerRouterIpAddress() {
  return this.customerRouterIpAddress;
}

/**
 * Sets the customer router subinterface IPv4 address + prefix length. [Output Only].
 * @param customerRouterIpAddress customerRouterIpAddress or {@code null} for none
 * @return this object, to allow call chaining
 */
public InterconnectAttachment setCustomerRouterIpAddress(java.lang.String customerRouterIpAddress) {
  this.customerRouterIpAddress = customerRouterIpAddress;
  return this;
}

/**
 * Returns an optional description of this resource.
 * @return value or {@code null} for none
 */
public java.lang.String getDescription() {
  return this.description;
}

/**
 * Sets an optional description of this resource.
 * @param description description or {@code null} for none
 * @return this object, to allow call chaining
 */
public InterconnectAttachment setDescription(java.lang.String description) {
  this.description = description;
  return this;
}
/**
 * Returns the desired availability domain for the attachment. Only available for type PARTNER,
 * at creation time. One of: AVAILABILITY_DOMAIN_ANY, AVAILABILITY_DOMAIN_1,
 * AVAILABILITY_DOMAIN_2. For improved reliability, customers should configure a pair of
 * attachments, one per availability domain. The selected availability domain will be provided
 * to the Partner via the pairing key, so that the provisioned circuit will lie in the
 * specified domain. If not specified, the value will default to AVAILABILITY_DOMAIN_ANY.
 * @return value or {@code null} for none
 */
public java.lang.String getEdgeAvailabilityDomain() {
  return this.edgeAvailabilityDomain;
}

/**
 * Sets the desired availability domain; see {@link #getEdgeAvailabilityDomain()} for the
 * accepted values and default.
 * @param edgeAvailabilityDomain edgeAvailabilityDomain or {@code null} for none
 * @return this object, to allow call chaining
 */
public InterconnectAttachment setEdgeAvailabilityDomain(java.lang.String edgeAvailabilityDomain) {
  this.edgeAvailabilityDomain = edgeAvailabilityDomain;
  return this;
}

/**
 * Returns [Output Only] the Google reference ID, to be used when raising support tickets with
 * Google or otherwise to debug backend connectivity issues. [Deprecated] This field is not
 * used.
 * @return value or {@code null} for none
 */
public java.lang.String getGoogleReferenceId() {
  return this.googleReferenceId;
}

/**
 * Sets the Google reference ID. [Output Only]; [Deprecated] this field is not used.
 * @param googleReferenceId googleReferenceId or {@code null} for none
 * @return this object, to allow call chaining
 */
public InterconnectAttachment setGoogleReferenceId(java.lang.String googleReferenceId) {
  this.googleReferenceId = googleReferenceId;
  return this;
}

/**
 * Returns [Output Only] the unique identifier for the resource, defined by the server.
 * @return value or {@code null} for none
 */
public java.math.BigInteger getId() {
  return this.id;
}

/**
 * Sets the unique identifier for the resource. [Output Only]; defined by the server.
 * @param id id or {@code null} for none
 * @return this object, to allow call chaining
 */
public InterconnectAttachment setId(java.math.BigInteger id) {
  this.id = id;
  return this;
}

/**
 * Returns the URL of the underlying Interconnect object that this attachment's traffic will
 * traverse through.
 * @return value or {@code null} for none
 */
public java.lang.String getInterconnect() {
  return this.interconnect;
}

/**
 * Sets the URL of the underlying Interconnect object for this attachment's traffic.
 * @param interconnect interconnect or {@code null} for none
 * @return this object, to allow call chaining
 */
public InterconnectAttachment setInterconnect(java.lang.String interconnect) {
  this.interconnect = interconnect;
  return this;
}
/**
* [Output Only] Type of the resource. Always compute#interconnectAttachment for interconnect
* attachments.
* @return value or {@code null} for none
*/
public java.lang.String getKind() {
return kind;
}
/**
* [Output Only] Type of the resource. Always compute#interconnectAttachment for interconnect
* attachments.
* @param kind kind or {@code null} for none
*/
public InterconnectAttachment setKind(java.lang.String kind) {
this.kind = kind;
return this;
}
/**
* A fingerprint for the labels being applied to this InterconnectAttachment, which is essentially
* a hash of the labels set used for optimistic locking. The fingerprint is initially generated by
* Compute Engine and changes after every request to modify or update labels. You must always
* provide an up-to-date fingerprint hash in order to update or change labels, otherwise the
* request will fail with error 412 conditionNotMet.
*
* To see the latest fingerprint, make a get() request to retrieve an InterconnectAttachment.
* @see #decodeLabelFingerprint()
* @return value or {@code null} for none
*/
public java.lang.String getLabelFingerprint() {
return labelFingerprint;
}
/**
* A fingerprint for the labels being applied to this InterconnectAttachment, which is essentially
* a hash of the labels set used for optimistic locking. The fingerprint is initially generated by
* Compute Engine and changes after every request to modify or update labels. You must always
* provide an up-to-date fingerprint hash in order to update or change labels, otherwise the
* request will fail with error 412 conditionNotMet.
*
* To see the latest fingerprint, make a get() request to retrieve an InterconnectAttachment.
* @see #getLabelFingerprint()
* @return Base64 decoded value or {@code null} for none
*
* @since 1.14
*/
public byte[] decodeLabelFingerprint() {
return com.google.api.client.util.Base64.decodeBase64(labelFingerprint);
}
  /**
   * A fingerprint for the labels being applied to this InterconnectAttachment, which is essentially
   * a hash of the labels set used for optimistic locking. The fingerprint is initially generated by
   * Compute Engine and changes after every request to modify or update labels. You must always
   * provide an up-to-date fingerprint hash in order to update or change labels, otherwise the
   * request will fail with error 412 conditionNotMet.
   *
   * To see the latest fingerprint, make a get() request to retrieve an InterconnectAttachment.
   * @see #encodeLabelFingerprint(byte[])
   * @param labelFingerprint labelFingerprint or {@code null} for none
   */
  public InterconnectAttachment setLabelFingerprint(java.lang.String labelFingerprint) {
    this.labelFingerprint = labelFingerprint;
    return this;
  }
  /**
   * A fingerprint for the labels being applied to this InterconnectAttachment, which is essentially
   * a hash of the labels set used for optimistic locking. The fingerprint is initially generated by
   * Compute Engine and changes after every request to modify or update labels. You must always
   * provide an up-to-date fingerprint hash in order to update or change labels, otherwise the
   * request will fail with error 412 conditionNotMet.
   *
   * To see the latest fingerprint, make a get() request to retrieve an InterconnectAttachment.
   * @see #setLabelFingerprint(java.lang.String)
   *
   * <p>
   * The value is encoded Base64 or {@code null} for none.
   * </p>
   *
   * @param labelFingerprint raw (unencoded) fingerprint bytes, or {@code null} for none
   * @since 1.14
   */
  public InterconnectAttachment encodeLabelFingerprint(byte[] labelFingerprint) {
    this.labelFingerprint = com.google.api.client.util.Base64.encodeBase64URLSafeString(labelFingerprint);
    return this;
  }
/**
* Labels to apply to this InterconnectAttachment resource. These can be later modified by the
* setLabels method. Each label key/value must comply with RFC1035. Label values may be empty.
* @return value or {@code null} for none
*/
public java.util.Map<String, java.lang.String> getLabels() {
return labels;
}
/**
* Labels to apply to this InterconnectAttachment resource. These can be later modified by the
* setLabels method. Each label key/value must comply with RFC1035. Label values may be empty.
* @param labels labels or {@code null} for none
*/
public InterconnectAttachment setLabels(java.util.Map<String, java.lang.String> labels) {
this.labels = labels;
return this;
}
/**
* Name of the resource. Provided by the client when the resource is created. The name must be
* 1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63 characters
* long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first
* character must be a lowercase letter, and all following characters must be a dash, lowercase
* letter, or digit, except the last character, which cannot be a dash.
* @return value or {@code null} for none
*/
public java.lang.String getName() {
return name;
}
/**
* Name of the resource. Provided by the client when the resource is created. The name must be
* 1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63 characters
* long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first
* character must be a lowercase letter, and all following characters must be a dash, lowercase
* letter, or digit, except the last character, which cannot be a dash.
* @param name name or {@code null} for none
*/
public InterconnectAttachment setName(java.lang.String name) {
this.name = name;
return this;
}
/**
* [Output Only] The current status of whether or not this interconnect attachment is functional,
* which can take one of the following values: - OS_ACTIVE: The attachment has been turned up and
* is ready to use. - OS_UNPROVISIONED: The attachment is not ready to use yet, because turnup is
* not complete.
* @return value or {@code null} for none
*/
public java.lang.String getOperationalStatus() {
return operationalStatus;
}
/**
* [Output Only] The current status of whether or not this interconnect attachment is functional,
* which can take one of the following values: - OS_ACTIVE: The attachment has been turned up and
* is ready to use. - OS_UNPROVISIONED: The attachment is not ready to use yet, because turnup is
* not complete.
* @param operationalStatus operationalStatus or {@code null} for none
*/
public InterconnectAttachment setOperationalStatus(java.lang.String operationalStatus) {
this.operationalStatus = operationalStatus;
return this;
}
/**
* [Output only for type PARTNER. Input only for PARTNER_PROVIDER. Not present for DEDICATED]. The
* opaque identifier of an PARTNER attachment used to initiate provisioning with a selected
* partner. Of the form "XXXXX/region/domain"
* @return value or {@code null} for none
*/
public java.lang.String getPairingKey() {
return pairingKey;
}
/**
* [Output only for type PARTNER. Input only for PARTNER_PROVIDER. Not present for DEDICATED]. The
* opaque identifier of an PARTNER attachment used to initiate provisioning with a selected
* partner. Of the form "XXXXX/region/domain"
* @param pairingKey pairingKey or {@code null} for none
*/
public InterconnectAttachment setPairingKey(java.lang.String pairingKey) {
this.pairingKey = pairingKey;
return this;
}
/**
* Optional BGP ASN for the router supplied by a Layer 3 Partner if they configured BGP on behalf
* of the customer. Output only for PARTNER type, input only for PARTNER_PROVIDER, not available
* for DEDICATED.
* @return value or {@code null} for none
*/
public java.lang.Long getPartnerAsn() {
return partnerAsn;
}
/**
* Optional BGP ASN for the router supplied by a Layer 3 Partner if they configured BGP on behalf
* of the customer. Output only for PARTNER type, input only for PARTNER_PROVIDER, not available
* for DEDICATED.
* @param partnerAsn partnerAsn or {@code null} for none
*/
public InterconnectAttachment setPartnerAsn(java.lang.Long partnerAsn) {
this.partnerAsn = partnerAsn;
return this;
}
  /**
   * Informational metadata about Partner attachments from Partners to display to customers. Output
   * only for PARTNER type, mutable for PARTNER_PROVIDER, not available for DEDICATED.
   * @return value or {@code null} for none
   */
  public InterconnectAttachmentPartnerMetadata getPartnerMetadata() {
    return partnerMetadata;
  }
  /**
   * Informational metadata about Partner attachments from Partners to display to customers. Output
   * only for PARTNER type, mutable for PARTNER_PROVIDER, not available for DEDICATED.
   * @param partnerMetadata partnerMetadata or {@code null} for none
   */
  public InterconnectAttachment setPartnerMetadata(InterconnectAttachmentPartnerMetadata partnerMetadata) {
    this.partnerMetadata = partnerMetadata;
    return this;
  }
/**
* [Output Only] Information specific to an InterconnectAttachment. This property is populated if
* the interconnect that this is attached to is of type DEDICATED.
* @return value or {@code null} for none
*/
public InterconnectAttachmentPrivateInfo getPrivateInterconnectInfo() {
return privateInterconnectInfo;
}
/**
* [Output Only] Information specific to an InterconnectAttachment. This property is populated if
* the interconnect that this is attached to is of type DEDICATED.
* @param privateInterconnectInfo privateInterconnectInfo or {@code null} for none
*/
public InterconnectAttachment setPrivateInterconnectInfo(InterconnectAttachmentPrivateInfo privateInterconnectInfo) {
this.privateInterconnectInfo = privateInterconnectInfo;
return this;
}
/**
* [Output Only] URL of the region where the regional interconnect attachment resides. You must
* specify this field as part of the HTTP request URL. It is not settable as a field in the
* request body.
* @return value or {@code null} for none
*/
public java.lang.String getRegion() {
return region;
}
/**
* [Output Only] URL of the region where the regional interconnect attachment resides. You must
* specify this field as part of the HTTP request URL. It is not settable as a field in the
* request body.
* @param region region or {@code null} for none
*/
public InterconnectAttachment setRegion(java.lang.String region) {
this.region = region;
return this;
}
  /**
   * URL of the Cloud Router to be used for dynamic routing. This router must be in the same region
   * as this InterconnectAttachment. The InterconnectAttachment will automatically connect the
   * Interconnect to the network &amp; region within which the Cloud Router is configured.
   * @return value or {@code null} for none
   */
  public java.lang.String getRouter() {
    return router;
  }
  /**
   * URL of the Cloud Router to be used for dynamic routing. This router must be in the same region
   * as this InterconnectAttachment. The InterconnectAttachment will automatically connect the
   * Interconnect to the network &amp; region within which the Cloud Router is configured.
   * @param router router or {@code null} for none
   */
  public InterconnectAttachment setRouter(java.lang.String router) {
    this.router = router;
    return this;
  }
/**
* [Output Only] Server-defined URL for the resource.
* @return value or {@code null} for none
*/
public java.lang.String getSelfLink() {
return selfLink;
}
/**
* [Output Only] Server-defined URL for the resource.
* @param selfLink selfLink or {@code null} for none
*/
public InterconnectAttachment setSelfLink(java.lang.String selfLink) {
this.selfLink = selfLink;
return this;
}
/**
* [Output Only] The current state of this attachment's functionality. Enum values ACTIVE and
* UNPROVISIONED are shared by DEDICATED/PRIVATE, PARTNER, and PARTNER_PROVIDER interconnect
* attachments, while enum values PENDING_PARTNER, PARTNER_REQUEST_RECEIVED, and PENDING_CUSTOMER
* are used for only PARTNER and PARTNER_PROVIDER interconnect attachments. This state can take
* one of the following values: - ACTIVE: The attachment has been turned up and is ready to use.
* - UNPROVISIONED: The attachment is not ready to use yet, because turnup is not complete. -
* PENDING_PARTNER: A newly-created PARTNER attachment that has not yet been configured on the
* Partner side. - PARTNER_REQUEST_RECEIVED: A PARTNER attachment is in the process of
* provisioning after a PARTNER_PROVIDER attachment was created that references it. -
* PENDING_CUSTOMER: A PARTNER or PARTNER_PROVIDER attachment that is waiting for a customer to
* activate it. - DEFUNCT: The attachment was deleted externally and is no longer functional.
* This could be because the associated Interconnect was removed, or because the other side of a
* Partner attachment was deleted.
* @return value or {@code null} for none
*/
public java.lang.String getState() {
return state;
}
/**
* [Output Only] The current state of this attachment's functionality. Enum values ACTIVE and
* UNPROVISIONED are shared by DEDICATED/PRIVATE, PARTNER, and PARTNER_PROVIDER interconnect
* attachments, while enum values PENDING_PARTNER, PARTNER_REQUEST_RECEIVED, and PENDING_CUSTOMER
* are used for only PARTNER and PARTNER_PROVIDER interconnect attachments. This state can take
* one of the following values: - ACTIVE: The attachment has been turned up and is ready to use.
* - UNPROVISIONED: The attachment is not ready to use yet, because turnup is not complete. -
* PENDING_PARTNER: A newly-created PARTNER attachment that has not yet been configured on the
* Partner side. - PARTNER_REQUEST_RECEIVED: A PARTNER attachment is in the process of
* provisioning after a PARTNER_PROVIDER attachment was created that references it. -
* PENDING_CUSTOMER: A PARTNER or PARTNER_PROVIDER attachment that is waiting for a customer to
* activate it. - DEFUNCT: The attachment was deleted externally and is no longer functional.
* This could be because the associated Interconnect was removed, or because the other side of a
* Partner attachment was deleted.
* @param state state or {@code null} for none
*/
public InterconnectAttachment setState(java.lang.String state) {
this.state = state;
return this;
}
/**
* The type of interconnect attachment this is, which can take one of the following values: -
* DEDICATED: an attachment to a Dedicated Interconnect. - PARTNER: an attachment to a Partner
* Interconnect, created by the customer. - PARTNER_PROVIDER: an attachment to a Partner
* Interconnect, created by the partner.
* @return value or {@code null} for none
*/
public java.lang.String getType() {
return type;
}
/**
* The type of interconnect attachment this is, which can take one of the following values: -
* DEDICATED: an attachment to a Dedicated Interconnect. - PARTNER: an attachment to a Partner
* Interconnect, created by the customer. - PARTNER_PROVIDER: an attachment to a Partner
* Interconnect, created by the partner.
* @param type type or {@code null} for none
*/
public InterconnectAttachment setType(java.lang.String type) {
this.type = type;
return this;
}
/**
* The IEEE 802.1Q VLAN tag for this attachment, in the range 2-4094. Only specified at creation
* time.
* @return value or {@code null} for none
*/
public java.lang.Integer getVlanTag8021q() {
return vlanTag8021q;
}
/**
* The IEEE 802.1Q VLAN tag for this attachment, in the range 2-4094. Only specified at creation
* time.
* @param vlanTag8021q vlanTag8021q or {@code null} for none
*/
public InterconnectAttachment setVlanTag8021q(java.lang.Integer vlanTag8021q) {
this.vlanTag8021q = vlanTag8021q;
return this;
}
  /**
   * Sets a field value by name, delegating to the superclass; overridden only
   * to narrow the return type for call chaining.
   */
  @Override
  public InterconnectAttachment set(String fieldName, Object value) {
    return (InterconnectAttachment) super.set(fieldName, value);
  }
  /**
   * Returns a copy of this instance, delegating to the superclass; overridden
   * only to narrow the return type.
   */
  @Override
  public InterconnectAttachment clone() {
    return (InterconnectAttachment) super.clone();
  }
}
|
apache/jackrabbit | 37,993 | jackrabbit-core/src/main/java/org/apache/jackrabbit/core/observation/EventState.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.core.observation;
import org.apache.jackrabbit.core.SessionImpl;
import org.apache.jackrabbit.core.nodetype.NodeTypeManagerImpl;
import org.apache.jackrabbit.core.id.ItemId;
import org.apache.jackrabbit.core.id.PropertyId;
import org.apache.jackrabbit.core.id.NodeId;
import org.apache.jackrabbit.core.value.InternalValue;
import org.apache.jackrabbit.core.state.ItemStateException;
import org.apache.jackrabbit.spi.Path;
import org.apache.jackrabbit.spi.Name;
import org.apache.jackrabbit.spi.commons.conversion.CachingPathResolver;
import org.apache.jackrabbit.spi.commons.conversion.IllegalNameException;
import org.apache.jackrabbit.spi.commons.conversion.NameResolver;
import org.apache.jackrabbit.spi.commons.conversion.ParsingPathResolver;
import org.apache.jackrabbit.spi.commons.conversion.PathResolver;
import org.apache.jackrabbit.spi.commons.name.PathFactoryImpl;
import javax.jcr.observation.Event;
import org.slf4j.LoggerFactory;
import org.slf4j.Logger;
import javax.jcr.NamespaceException;
import javax.jcr.Session;
import javax.jcr.RepositoryException;
import javax.jcr.nodetype.NoSuchNodeTypeException;
import javax.jcr.nodetype.NodeType;
import java.util.List;
import java.util.Set;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Collections;
import java.util.Map;
import java.util.HashMap;
/**
* The <code>EventState</code> class encapsulates the session
* independent state of an {@link javax.jcr.observation.Event}.
*/
public class EventState {
    /**
     * The logger instance for this class.
     */
    private static final Logger log = LoggerFactory.getLogger(EventState.class);
    /**
     * The caching path resolver.
     */
    private static CachingPathResolver cachingPathResolver;
    /**
     * The key <code>srcAbsPath</code> in the info map.
     */
    static final String SRC_ABS_PATH = "srcAbsPath";
    /**
     * The key <code>destAbsPath</code> in the info map.
     */
    static final String DEST_ABS_PATH = "destAbsPath";
    /**
     * The key <code>srcChildRelPath</code> in the info map.
     */
    static final String SRC_CHILD_REL_PATH = "srcChildRelPath";
    /**
     * The key <code>destChildRelPath</code> in the info map.
     */
    static final String DEST_CHILD_REL_PATH = "destChildRelPath";
    /**
     * The type of this event: one of the event type constants defined by
     * {@link javax.jcr.observation.Event}.
     */
    private final int type;
    /**
     * The Id of the parent node associated with this event.
     */
    private final NodeId parentId;
    /**
     * The path of the parent node associated with this event.
     */
    private final Path parentPath;
    /**
     * The UUID of a child node, in case this EventState is of type
     * {@link javax.jcr.observation.Event#NODE_ADDED} or
     * {@link javax.jcr.observation.Event#NODE_REMOVED}.
     */
    private final NodeId childId;
    /**
     * The relative path of the child item associated with this event.
     * This is basically the name of the item with an optional index.
     */
    private final Path childRelPath;
    /**
     * The node type name of the parent node.
     */
    private final Name nodeType;
    /**
     * Set of mixin QNames assigned to the parent node.
     */
    private final Set<Name> mixins;
    /**
     * Set of node types. This Set consists of the primary node type and all
     * mixin types assigned to the associated parent node of this event state.
     * <p>
     * This <code>Set</code> is initialized when
     * {@link #getNodeTypes(NodeTypeManagerImpl)} is called for the first time.
     */
    private Set<NodeType> allTypes;
    /**
     * The session that caused this event.
     */
    private final Session session;
    /**
     * Cached String representation of this <code>EventState</code>.
     */
    private String stringValue;
    /**
     * Cached hashCode value for this <code>Event</code>.
     */
    private int hashCode;
    /**
     * Flag indicating whether this is an external event, e.g. originating from
     * another node in a clustered environment.
     */
    private final boolean external;
    /**
     * The info Map associated with this event.
     */
    private Map<String, InternalValue> info = Collections.emptyMap();
    /**
     * If set to <code>true</code>, indicates that the child node of a node
     * added or removed event is a shareable node.
     */
    private boolean shareableNode;
    /**
     * Creates a new <code>EventState</code> instance.
     *
     * @param type the type of this event.
     * @param parentId the id of the parent node associated with this event.
     * @param parentPath the path of the parent node associated with this
     * event.
     * @param childId the id of the child node associated with this event.
     * If the event type is one of: <code>PROPERTY_ADDED</code>,
     * <code>PROPERTY_CHANGED</code> or <code>PROPERTY_REMOVED</code>
     * this parameter must be <code>null</code>.
     * @param childPath the relative path of the child item associated with
     * this event.
     * @param nodeType the node type of the parent node.
     * @param mixins mixins assigned to the parent node.
     * @param session the {@link javax.jcr.Session} that caused this event.
     * @param external flag indicating whether this is an external event,
     * e.g. originating from another node in a clustered
     * environment.
     * @throws IllegalArgumentException if <code>childId</code> is non-null
     * for a property event, or null for a node event other than
     * {@link Event#PERSIST}.
     */
    private EventState(int type, NodeId parentId, Path parentPath,
                       NodeId childId, Path childPath, Name nodeType,
                       Set<Name> mixins, Session session, boolean external) {
        // property events carry no child node id; node events must carry one,
        // with the exception of PERSIST, which is not associated with any item
        int mask = (Event.PROPERTY_ADDED | Event.PROPERTY_CHANGED | Event.PROPERTY_REMOVED);
        if ((type & mask) > 0) {
            if (childId != null) {
                throw new IllegalArgumentException("childId only allowed for Node events.");
            }
        } else {
            if (childId == null && type != Event.PERSIST) {
                throw new IllegalArgumentException("childId must not be null for Node events.");
            }
        }
        this.type = type;
        this.parentId = parentId;
        this.parentPath = parentPath;
        this.childId = childId;
        this.childRelPath = childPath;
        this.nodeType = nodeType;
        this.mixins = mixins;
        this.session = session;
        this.external = external;
    }
//-----------------< factory methods >--------------------------------------
/**
* Creates a new {@link javax.jcr.observation.Event} of type
* {@link javax.jcr.observation.Event#NODE_ADDED}.
*
* @param parentId the id of the parent node associated with
* this <code>EventState</code>.
* @param parentPath the path of the parent node associated with
* this <code>EventState</code>.
* @param childId the id of the child node associated with this event.
* @param childPath the relative path of the child node that was added.
* @param nodeType the node type of the parent node.
* @param mixins mixins assigned to the parent node.
* @param session the session that added the node.
* @return an <code>EventState</code> instance.
*/
public static EventState childNodeAdded(NodeId parentId,
Path parentPath,
NodeId childId,
Path childPath,
Name nodeType,
Set<Name> mixins,
Session session) {
return childNodeAdded(parentId, parentPath, childId,
childPath, nodeType, mixins, session, false);
}
/**
* Creates a new {@link javax.jcr.observation.Event} of type
* {@link javax.jcr.observation.Event#NODE_ADDED}.
*
* @param parentId the id of the parent node associated with
* this <code>EventState</code>.
* @param parentPath the path of the parent node associated with
* this <code>EventState</code>.
* @param childId the id of the child node associated with this event.
* @param childPath the relative path of the child node that was added.
* @param nodeType the node type of the parent node.
* @param mixins mixins assigned to the parent node.
* @param session the session that added the node.
* @param external flag indicating whether this is an external event
* @return an <code>EventState</code> instance.
*/
public static EventState childNodeAdded(NodeId parentId,
Path parentPath,
NodeId childId,
Path childPath,
Name nodeType,
Set<Name> mixins,
Session session,
boolean external) {
return new EventState(Event.NODE_ADDED, parentId, parentPath,
childId, childPath, nodeType, mixins, session, external);
}
/**
* Creates a new {@link javax.jcr.observation.Event} of type
* {@link javax.jcr.observation.Event#NODE_REMOVED}.
*
* @param parentId the id of the parent node associated with
* this <code>EventState</code>.
* @param parentPath the path of the parent node associated with
* this <code>EventState</code>.
* @param childId the id of the child node associated with this event.
* @param childPath the relative path of the child node that was removed.
* @param nodeType the node type of the parent node.
* @param mixins mixins assigned to the parent node.
* @param session the session that removed the node.
* @return an <code>EventState</code> instance.
*/
public static EventState childNodeRemoved(NodeId parentId,
Path parentPath,
NodeId childId,
Path childPath,
Name nodeType,
Set<Name> mixins,
Session session) {
return childNodeRemoved(parentId, parentPath, childId,
childPath, nodeType, mixins, session, false);
}
/**
* Creates a new {@link javax.jcr.observation.Event} of type
* {@link javax.jcr.observation.Event#NODE_REMOVED}.
*
* @param parentId the id of the parent node associated with
* this <code>EventState</code>.
* @param parentPath the path of the parent node associated with
* this <code>EventState</code>.
* @param childId the id of the child node associated with this event.
* @param childPath the relative path of the child node that was removed.
* @param nodeType the node type of the parent node.
* @param mixins mixins assigned to the parent node.
* @param session the session that removed the node.
* @param external flag indicating whether this is an external event
* @return an <code>EventState</code> instance.
*/
public static EventState childNodeRemoved(NodeId parentId,
Path parentPath,
NodeId childId,
Path childPath,
Name nodeType,
Set<Name> mixins,
Session session,
boolean external) {
return new EventState(Event.NODE_REMOVED, parentId, parentPath,
childId, childPath, nodeType, mixins, session, external);
}
/**
* Creates a new {@link javax.jcr.observation.Event} of type
* <code>NODE_MOVED</code>. The parent node associated with this event type
* is the parent node of the destination of the move!
* This method creates an event state without an info map. A caller of this
* method must ensure that it is properly set afterwards.
*
* @param parentId the id of the parent node associated with
* this <code>EventState</code>.
* @param parentPath the path of the parent node associated with
* this <code>EventState</code>.
* @param childId the id of the child node associated with this event.
* @param childPath the relative path of the child node that was moved.
* @param nodeType the node type of the parent node.
* @param mixins mixins assigned to the parent node.
* @param session the session that moved the node.
* @param external flag indicating whether this is an external event
* @return an <code>EventState</code> instance.
*/
public static EventState nodeMoved(NodeId parentId,
Path parentPath,
NodeId childId,
Path childPath,
Name nodeType,
Set<Name> mixins,
Session session,
boolean external) {
return new EventState(Event.NODE_MOVED, parentId, parentPath,
childId, childPath, nodeType, mixins, session, external);
}
/**
* Creates a new {@link javax.jcr.observation.Event} of type
* <code>NODE_MOVED</code>. The parent node associated with this event type
* is the parent node of the destination of the move!
*
* @param parentId the id of the parent node associated with this
* <code>EventState</code>.
* @param destPath the path of the destination of the move.
* @param childId the id of the child node associated with this event.
* @param srcPath the path of the source of the move.
* @param nodeType the node type of the parent node.
* @param mixins mixins assigned to the parent node.
* @param session the session that removed the node.
* @param external flag indicating whether this is an external event
* @return an <code>EventState</code> instance.
* @throws ItemStateException if <code>destPath</code> does not have a
* parent.
*/
public static EventState nodeMovedWithInfo(
NodeId parentId, Path destPath, NodeId childId, Path srcPath,
Name nodeType, Set<Name> mixins, Session session, boolean external)
throws ItemStateException {
try {
EventState es = nodeMoved(parentId, destPath.getAncestor(1),
childId, destPath, nodeType, mixins,
session, external);
Map<String, InternalValue> info = new HashMap<String, InternalValue>();
info.put(SRC_ABS_PATH, InternalValue.create(srcPath));
info.put(DEST_ABS_PATH, InternalValue.create(destPath));
es.setInfo(info);
return es;
} catch (RepositoryException e) {
// should never happen actually
String msg = "Unable to resolve parent for path: " + destPath;
log.error(msg);
throw new ItemStateException(msg, e);
}
}
    /**
     * Creates a new {@link javax.jcr.observation.Event} of type
     * <code>NODE_MOVED</code>. The parent node associated with this event type
     * is the parent node of the destination of the reorder!
     *
     * @param parentId        the id of the parent node associated with this
     *                        <code>EventState</code>.
     * @param parentPath      the path of the parent node associated with
     *                        this <code>EventState</code>.
     * @param childId         the id of the child node associated with this
     *                        event.
     * @param destChildPath   the name element of the node before it was reordered.
     * @param srcChildPath    the name element of the reordered node before the
     *                        reorder operation.
     * @param beforeChildPath the name element of the node before which the
     *                        reordered node is placed (may be <code>null</code>
     *                        if reordered to the end).
     * @param nodeType        the node type of the parent node.
     * @param mixins          mixins assigned to the parent node.
     * @param session         the session that removed the node.
     * @param external        flag indicating whether this is an external event
     * @return an <code>EventState</code> instance.
     */
    public static EventState nodeReordered(NodeId parentId,
                                           Path parentPath,
                                           NodeId childId,
                                           Path destChildPath,
                                           Path srcChildPath,
                                           Path beforeChildPath,
                                           Name nodeType,
                                           Set<Name> mixins,
                                           Session session,
                                           boolean external) {
        EventState es = nodeMoved(
                parentId, parentPath, childId, destChildPath,
                nodeType, mixins, session, external);
        Map<String, InternalValue> info = new HashMap<String, InternalValue>();
        info.put(SRC_CHILD_REL_PATH, createValue(srcChildPath));
        InternalValue value = null;
        if (beforeChildPath != null) {
            value = createValue(beforeChildPath);
        }
        // a null value deliberately marks a reorder to the end of the child
        // node list; HashMap (unlike the Map.of factories) permits null values
        info.put(DEST_CHILD_REL_PATH, value);
        es.setInfo(info);
        return es;
    }
/**
* Creates a new {@link javax.jcr.observation.Event} of type
* {@link javax.jcr.observation.Event#PROPERTY_ADDED}.
*
* @param parentId the id of the parent node associated with
* this <code>EventState</code>.
* @param parentPath the path of the parent node associated with
* this <code>EventState</code>.
* @param childPath the relative path of the property that was added.
* @param nodeType the node type of the parent node.
* @param mixins mixins assigned to the parent node.
* @param session the session that added the property.
* @return an <code>EventState</code> instance.
*/
public static EventState propertyAdded(NodeId parentId,
Path parentPath,
Path childPath,
Name nodeType,
Set<Name> mixins,
Session session) {
return propertyAdded(parentId, parentPath, childPath,
nodeType, mixins, session, false);
}
/**
* Creates a new {@link javax.jcr.observation.Event} of type
* {@link javax.jcr.observation.Event#PROPERTY_ADDED}.
*
* @param parentId the id of the parent node associated with
* this <code>EventState</code>.
* @param parentPath the path of the parent node associated with
* this <code>EventState</code>.
* @param childPath the relative path of the property that was added.
* @param nodeType the node type of the parent node.
* @param mixins mixins assigned to the parent node.
* @param session the session that added the property.
* @param external flag indicating whether this is an external event
* @return an <code>EventState</code> instance.
*/
public static EventState propertyAdded(NodeId parentId,
Path parentPath,
Path childPath,
Name nodeType,
Set<Name> mixins,
Session session,
boolean external) {
return new EventState(Event.PROPERTY_ADDED, parentId, parentPath,
null, childPath, nodeType, mixins, session, external);
}
/**
* Creates a new {@link javax.jcr.observation.Event} of type
* {@link javax.jcr.observation.Event#PROPERTY_REMOVED}.
*
* @param parentId the id of the parent node associated with
* this <code>EventState</code>.
* @param parentPath the path of the parent node associated with
* this <code>EventState</code>.
* @param childPath the relative path of the property that was removed.
* @param nodeType the node type of the parent node.
* @param mixins mixins assigned to the parent node.
* @param session the session that removed the property.
* @return an <code>EventState</code> instance.
*/
public static EventState propertyRemoved(NodeId parentId,
Path parentPath,
Path childPath,
Name nodeType,
Set<Name> mixins,
Session session) {
return propertyRemoved(parentId, parentPath, childPath,
nodeType, mixins, session, false);
}
/**
* Creates a new {@link javax.jcr.observation.Event} of type
* {@link javax.jcr.observation.Event#PROPERTY_REMOVED}.
*
* @param parentId the id of the parent node associated with
* this <code>EventState</code>.
* @param parentPath the path of the parent node associated with
* this <code>EventState</code>.
* @param childPath the relative path of the property that was removed.
* @param nodeType the node type of the parent node.
* @param mixins mixins assigned to the parent node.
* @param session the session that removed the property.
* @param external flag indicating whether this is an external event
* @return an <code>EventState</code> instance.
*/
public static EventState propertyRemoved(NodeId parentId,
Path parentPath,
Path childPath,
Name nodeType,
Set<Name> mixins,
Session session,
boolean external) {
return new EventState(Event.PROPERTY_REMOVED, parentId, parentPath,
null, childPath, nodeType, mixins, session, external);
}
/**
* Creates a new {@link javax.jcr.observation.Event} of type
* {@link javax.jcr.observation.Event#PROPERTY_CHANGED}.
*
* @param parentId the id of the parent node associated with
* this <code>EventState</code>.
* @param parentPath the path of the parent node associated with
* this <code>EventState</code>.
* @param childPath the relative path of the property that changed.
* @param nodeType the node type of the parent node.
* @param mixins mixins assigned to the parent node.
* @param session the session that changed the property.
* @return an <code>EventState</code> instance.
*/
public static EventState propertyChanged(NodeId parentId,
Path parentPath,
Path childPath,
Name nodeType,
Set<Name> mixins,
Session session) {
return propertyChanged(parentId, parentPath, childPath,
nodeType, mixins, session, false);
}
/**
* Creates a new {@link javax.jcr.observation.Event} of type
* {@link javax.jcr.observation.Event#PROPERTY_CHANGED}.
*
* @param parentId the id of the parent node associated with
* this <code>EventState</code>.
* @param parentPath the path of the parent node associated with
* this <code>EventState</code>.
* @param childPath the relative path of the property that changed.
* @param nodeType the node type of the parent node.
* @param mixins mixins assigned to the parent node.
* @param session the session that changed the property.
* @param external flag indicating whether this is an external event
* @return an <code>EventState</code> instance.
*/
public static EventState propertyChanged(NodeId parentId,
Path parentPath,
Path childPath,
Name nodeType,
Set<Name> mixins,
Session session,
boolean external) {
return new EventState(Event.PROPERTY_CHANGED, parentId, parentPath,
null, childPath, nodeType, mixins, session, external);
}
/**
* Creates a new {@link javax.jcr.observation.Event} of type
* {@link javax.jcr.observation.Event#PERSIST}.
*
* @param session the session that changed the property.
* @param external flag indicating whether this is an external event
* @return an <code>EventState</code> instance.
*/
public static EventState persist(Session session, boolean external) {
return new EventState(Event.PERSIST, null, null, null, null,
null, null, session, external);
}
/**
* {@inheritDoc}
*/
public int getType() {
return type;
}
/**
* Returns the uuid of the parent node.
*
* @return the uuid of the parent node.
*/
public NodeId getParentId() {
return parentId;
}
/**
* Returns the path of the parent node.
*
* @return the path of the parent node.
*/
public Path getParentPath() {
return parentPath;
}
/**
* Returns the Id of a child node operation.
* If this <code>EventState</code> was generated for a property
* operation this method returns <code>null</code>.
*
* @return the id of a child node operation.
*/
public NodeId getChildId() {
return childId;
}
/**
* Returns the relative {@link Path} of the child
* {@link javax.jcr.Item} associated with this event.
*
* @return the <code>Path</code> associated with this event.
*/
public Path getChildRelPath() {
return childRelPath;
}
/**
* Returns the node type of the parent node associated with this event.
*
* @return the node type of the parent associated with this event.
*/
public Name getNodeType() {
return nodeType;
}
/**
* Returns a set of <code>Name</code>s which are the names of the mixins
* assigned to the parent node associated with this event.
*
* @return the mixin names as <code>Name</code>s.
*/
public Set<Name> getMixinNames() {
return mixins;
}
/**
* Returns the <code>Set</code> of {@link javax.jcr.nodetype.NodeType}s
* assigned to the parent node associated with this event. This
* <code>Set</code> includes the primary type as well as all the mixin types
* assigned to the parent node.
*
* @return <code>Set</code> of {@link javax.jcr.nodetype.NodeType}s.
*/
public Set<NodeType> getNodeTypes(NodeTypeManagerImpl ntMgr) {
if (allTypes == null) {
Set<NodeType> tmp = new HashSet<NodeType>();
try {
tmp.add(ntMgr.getNodeType(nodeType));
} catch (NoSuchNodeTypeException e) {
log.warn("Unknown node type: " + nodeType);
}
Iterator<Name> it = mixins.iterator();
while (it.hasNext()) {
Name mixinName = it.next();
try {
tmp.add(ntMgr.getNodeType(mixinName));
} catch (NoSuchNodeTypeException e) {
log.warn("Unknown node type: " + mixinName);
}
}
allTypes = Collections.unmodifiableSet(tmp);
}
return allTypes;
}
/**
* {@inheritDoc}
*/
public String getUserId() {
return session.getUserID();
}
/**
* Returns the <code>Session</code> that caused / created this
* <code>EventState</code>.
*
* @return the <code>Session</code> that caused / created this
* <code>EventState</code>.
*/
Session getSession() {
return session;
}
/**
* Returns the id of the associated item of this <code>EventState</code>.
*
* @return the <code>ItemId</code> or <code>null</code> for {@link Event#PERSIST} events
*/
ItemId getTargetId() {
if (type == Event.PERSIST) {
return null;
} else if (childId == null) {
// property event
return new PropertyId(parentId, childRelPath.getName());
} else {
// node event
return childId;
}
}
/**
* Return a flag indicating whether this is an externally generated event.
*
* @return <code>true</code> if this is an external event;
* <code>false</code> otherwise
*/
boolean isExternal() {
return external;
}
/**
* @return an unmodifiable info Map.
*/
public Map<String, InternalValue> getInfo() {
return info;
}
/**
* Sets a new info map for this event.
*
* @param info the new info map.
*/
public void setInfo(Map<String, InternalValue> info) {
this.info = Collections.unmodifiableMap(new HashMap<String, InternalValue>(info));
}
/**
* Returns a flag indicating whether the child node of this event is a
* shareable node. Only applies to node added/removed events.
*
* @return <code>true</code> for a shareable child node, <code>false</code>
* otherwise.
*/
boolean isShareableNode() {
return shareableNode;
}
/**
* Sets a new value for the {@link #shareableNode} flag.
*
* @param shareableNode whether the child node is shareable.
* @see #isShareableNode()
*/
void setShareableNode(boolean shareableNode) {
this.shareableNode = shareableNode;
}
/**
* Returns a String representation of this <code>EventState</code>.
*
* @return a String representation of this <code>EventState</code>.
*/
public String toString() {
if (stringValue == null) {
StringBuilder sb = new StringBuilder();
sb.append("EventState: ").append(valueOf(type));
sb.append(", Parent: ").append(parentId);
sb.append(", Child: ").append(childRelPath);
sb.append(", UserId: ").append(session.getUserID());
sb.append(", Info: ").append(info);
stringValue = sb.toString();
}
return stringValue;
}
/**
* Returns a hashCode for this <code>EventState</code>.
*
* @return a hashCode for this <code>EventState</code>.
*/
public int hashCode() {
int h = hashCode;
if (h == 0) {
h = 37;
h = 37 * h + type;
h = 37 * h + (parentId != null ? parentId.hashCode() : 0);
h = 37 * h + (childRelPath != null ? childRelPath.hashCode() : 0);
h = 37 * h + session.hashCode();
h = 37 * h + info.hashCode();
hashCode = h;
}
return hashCode;
}
/**
* Returns <code>true</code> if this <code>EventState</code> is equal to
* another object.
*
* @param obj the reference object with which to compare.
* @return <code>true</code> if object <code>obj</code> is equal to this
* <code>EventState</code>; <code>false</code> otherwise.
*/
public boolean equals(Object obj) {
if (obj == this) {
return true;
}
if (obj instanceof EventState) {
EventState other = (EventState) obj;
return this.type == other.type
&& this.parentId.equals(other.parentId)
&& this.childRelPath.equals(other.childRelPath)
&& this.session.equals(other.session)
&& this.info.equals(other.info);
}
return false;
}
/**
* Returns a String representation of <code>eventType</code>.
*
* @param eventType an event type defined by {@link Event}.
* @return a String representation of <code>eventType</code>.
*/
public static String valueOf(int eventType) {
if (eventType == Event.NODE_ADDED) {
return "NodeAdded";
} else if (eventType == Event.NODE_MOVED) {
return "NodeMoved";
} else if (eventType == Event.NODE_REMOVED) {
return "NodeRemoved";
} else if (eventType == Event.PROPERTY_ADDED) {
return "PropertyAdded";
} else if (eventType == Event.PROPERTY_CHANGED) {
return "PropertyChanged";
} else if (eventType == Event.PROPERTY_REMOVED) {
return "PropertyRemoved";
} else if (eventType == Event.PERSIST) {
return "Persist";
} else {
return "UnknownEventType";
}
}
    /**
     * Creates an internal path value from the given path.
     * <p>
     * Thin convenience wrapper around <code>InternalValue.create(Path)</code>.
     *
     * @param path the path
     * @return an internal value wrapping the path
     */
    private static InternalValue createValue(Path path) {
        return InternalValue.create(path);
    }
/**
* Get the longest common path of all event state paths.
*
* @param events The list of EventState
* @param session The associated session; it can be null
* @return the longest common path
*/
public static String getCommonPath(List<EventState> events, SessionImpl session) {
String common = null;
try {
for (int i = 0; i < events.size(); i++) {
EventState state = events.get(i);
Path parentPath = state.getParentPath();
String s;
if (session == null) {
s = getJCRPath(parentPath);
} else {
s = session.getJCRPath(parentPath);
}
if (common == null) {
common = s;
} else if (!common.equals(s)) {
// Assign the shorter path to common.
if (s.length() < common.length()) {
String temp = common;
common = s;
s = temp;
}
// Find the real common.
while (!s.startsWith(common)) {
int idx = s.lastIndexOf('/');
if (idx < 0) {
break;
}
common = s.substring(0, idx + 1);
}
}
}
} catch (NamespaceException e) {
log.debug("Problem in retrieving JCR path", e);
}
return common;
}
private static String getJCRPath(Path path) {
setupCachingPathResolver();
String jcrPath;
try {
jcrPath = cachingPathResolver.getJCRPath(path);
} catch (NamespaceException e) {
jcrPath = "";
log.debug("Problem in retrieving JCR path", e);
}
return jcrPath;
}
    // Lazily creates the shared fallback resolver used to turn Paths into
    // JCR path strings when no session is available.
    // NOTE(review): this check-then-assign on a static field is not
    // synchronized; concurrent callers may race and each build a resolver.
    // The extra instances would simply be discarded, but confirm safe
    // publication is acceptable for the intended callers.
    private static void setupCachingPathResolver() {
        if (cachingPathResolver != null) {
            return;
        }
        PathResolver pathResolver = new ParsingPathResolver(PathFactoryImpl.getInstance(), new NameResolver() {
            // Qualified-name lookup is not needed by this resolver: always null.
            public Name getQName(String name) throws IllegalNameException, NamespaceException {
                return null;
            }
            // Render a Name using only its local part (namespace is dropped).
            public String getJCRName(Name name) throws NamespaceException {
                return name.getLocalName();
            }
        });
        cachingPathResolver = new CachingPathResolver(pathResolver);
    }
}
|
googleapis/google-cloud-java | 37,736 | java-dialogflow/proto-google-cloud-dialogflow-v2beta1/src/main/java/com/google/cloud/dialogflow/v2beta1/ListEntityTypesRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/dialogflow/v2beta1/entity_type.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.dialogflow.v2beta1;
/**
*
*
* <pre>
* The request message for
* [EntityTypes.ListEntityTypes][google.cloud.dialogflow.v2beta1.EntityTypes.ListEntityTypes].
* </pre>
*
* Protobuf type {@code google.cloud.dialogflow.v2beta1.ListEntityTypesRequest}
*/
public final class ListEntityTypesRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.dialogflow.v2beta1.ListEntityTypesRequest)
ListEntityTypesRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListEntityTypesRequest.newBuilder() to construct.
private ListEntityTypesRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListEntityTypesRequest() {
parent_ = "";
languageCode_ = "";
pageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListEntityTypesRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.dialogflow.v2beta1.EntityTypeProto
.internal_static_google_cloud_dialogflow_v2beta1_ListEntityTypesRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.dialogflow.v2beta1.EntityTypeProto
.internal_static_google_cloud_dialogflow_v2beta1_ListEntityTypesRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.dialogflow.v2beta1.ListEntityTypesRequest.class,
com.google.cloud.dialogflow.v2beta1.ListEntityTypesRequest.Builder.class);
}
public static final int PARENT_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The agent to list all entity types from.
* Supported formats:
* - `projects/<Project ID>/agent`
* - `projects/<Project ID>/locations/<Location ID>/agent`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
@java.lang.Override
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. The agent to list all entity types from.
* Supported formats:
* - `projects/<Project ID>/agent`
* - `projects/<Project ID>/locations/<Location ID>/agent`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int LANGUAGE_CODE_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object languageCode_ = "";
/**
*
*
* <pre>
* Optional. The language used to access language-specific data.
* If not specified, the agent's default language is used.
* For more information, see
* [Multilingual intent and entity
* data](https://cloud.google.com/dialogflow/docs/agents-multilingual#intent-entity).
* </pre>
*
* <code>string language_code = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The languageCode.
*/
@java.lang.Override
public java.lang.String getLanguageCode() {
java.lang.Object ref = languageCode_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
languageCode_ = s;
return s;
}
}
/**
*
*
* <pre>
* Optional. The language used to access language-specific data.
* If not specified, the agent's default language is used.
* For more information, see
* [Multilingual intent and entity
* data](https://cloud.google.com/dialogflow/docs/agents-multilingual#intent-entity).
* </pre>
*
* <code>string language_code = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for languageCode.
*/
@java.lang.Override
public com.google.protobuf.ByteString getLanguageCodeBytes() {
java.lang.Object ref = languageCode_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
languageCode_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int PAGE_SIZE_FIELD_NUMBER = 3;
private int pageSize_ = 0;
/**
*
*
* <pre>
* Optional. The maximum number of items to return in a single page. By
* default 100 and at most 1000.
* </pre>
*
* <code>int32 page_size = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
public static final int PAGE_TOKEN_FIELD_NUMBER = 4;
@SuppressWarnings("serial")
private volatile java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* Optional. The next_page_token value returned from a previous list request.
* </pre>
*
* <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The pageToken.
*/
@java.lang.Override
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* Optional. The next_page_token value returned from a previous list request.
* </pre>
*
* <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for pageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(languageCode_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, languageCode_);
}
if (pageSize_ != 0) {
output.writeInt32(3, pageSize_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 4, pageToken_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(languageCode_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, languageCode_);
}
if (pageSize_ != 0) {
size += com.google.protobuf.CodedOutputStream.computeInt32Size(3, pageSize_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, pageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.dialogflow.v2beta1.ListEntityTypesRequest)) {
return super.equals(obj);
}
com.google.cloud.dialogflow.v2beta1.ListEntityTypesRequest other =
(com.google.cloud.dialogflow.v2beta1.ListEntityTypesRequest) obj;
if (!getParent().equals(other.getParent())) return false;
if (!getLanguageCode().equals(other.getLanguageCode())) return false;
if (getPageSize() != other.getPageSize()) return false;
if (!getPageToken().equals(other.getPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + PARENT_FIELD_NUMBER;
hash = (53 * hash) + getParent().hashCode();
hash = (37 * hash) + LANGUAGE_CODE_FIELD_NUMBER;
hash = (53 * hash) + getLanguageCode().hashCode();
hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
hash = (53 * hash) + getPageSize();
hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getPageToken().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.dialogflow.v2beta1.ListEntityTypesRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dialogflow.v2beta1.ListEntityTypesRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dialogflow.v2beta1.ListEntityTypesRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dialogflow.v2beta1.ListEntityTypesRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dialogflow.v2beta1.ListEntityTypesRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dialogflow.v2beta1.ListEntityTypesRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dialogflow.v2beta1.ListEntityTypesRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.dialogflow.v2beta1.ListEntityTypesRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.dialogflow.v2beta1.ListEntityTypesRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.dialogflow.v2beta1.ListEntityTypesRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.dialogflow.v2beta1.ListEntityTypesRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.dialogflow.v2beta1.ListEntityTypesRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.dialogflow.v2beta1.ListEntityTypesRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* The request message for
* [EntityTypes.ListEntityTypes][google.cloud.dialogflow.v2beta1.EntityTypes.ListEntityTypes].
* </pre>
*
* Protobuf type {@code google.cloud.dialogflow.v2beta1.ListEntityTypesRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.dialogflow.v2beta1.ListEntityTypesRequest)
com.google.cloud.dialogflow.v2beta1.ListEntityTypesRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.dialogflow.v2beta1.EntityTypeProto
.internal_static_google_cloud_dialogflow_v2beta1_ListEntityTypesRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.dialogflow.v2beta1.EntityTypeProto
.internal_static_google_cloud_dialogflow_v2beta1_ListEntityTypesRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.dialogflow.v2beta1.ListEntityTypesRequest.class,
com.google.cloud.dialogflow.v2beta1.ListEntityTypesRequest.Builder.class);
}
// Construct using com.google.cloud.dialogflow.v2beta1.ListEntityTypesRequest.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
parent_ = "";
languageCode_ = "";
pageSize_ = 0;
pageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.dialogflow.v2beta1.EntityTypeProto
.internal_static_google_cloud_dialogflow_v2beta1_ListEntityTypesRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.dialogflow.v2beta1.ListEntityTypesRequest getDefaultInstanceForType() {
return com.google.cloud.dialogflow.v2beta1.ListEntityTypesRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.dialogflow.v2beta1.ListEntityTypesRequest build() {
com.google.cloud.dialogflow.v2beta1.ListEntityTypesRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.dialogflow.v2beta1.ListEntityTypesRequest buildPartial() {
com.google.cloud.dialogflow.v2beta1.ListEntityTypesRequest result =
new com.google.cloud.dialogflow.v2beta1.ListEntityTypesRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.dialogflow.v2beta1.ListEntityTypesRequest result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.parent_ = parent_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.languageCode_ = languageCode_;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.pageSize_ = pageSize_;
}
if (((from_bitField0_ & 0x00000008) != 0)) {
result.pageToken_ = pageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.dialogflow.v2beta1.ListEntityTypesRequest) {
return mergeFrom((com.google.cloud.dialogflow.v2beta1.ListEntityTypesRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.dialogflow.v2beta1.ListEntityTypesRequest other) {
if (other == com.google.cloud.dialogflow.v2beta1.ListEntityTypesRequest.getDefaultInstance())
return this;
if (!other.getParent().isEmpty()) {
parent_ = other.parent_;
bitField0_ |= 0x00000001;
onChanged();
}
if (!other.getLanguageCode().isEmpty()) {
languageCode_ = other.languageCode_;
bitField0_ |= 0x00000002;
onChanged();
}
if (other.getPageSize() != 0) {
setPageSize(other.getPageSize());
}
if (!other.getPageToken().isEmpty()) {
pageToken_ = other.pageToken_;
bitField0_ |= 0x00000008;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
parent_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
languageCode_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
case 24:
{
pageSize_ = input.readInt32();
bitField0_ |= 0x00000004;
break;
} // case 24
case 34:
{
pageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000008;
break;
} // case 34
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The agent to list all entity types from.
* Supported formats:
* - `projects/<Project ID>/agent`
* - `projects/<Project ID>/locations/<Location ID>/agent`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
 * Required. The agent to list all entity types from.
 * Supported formats:
 * - `projects/<Project ID>/agent`
 * - `projects/<Project ID>/locations/<Location ID>/agent`
 *
 * @return The bytes for parent.
 */
public com.google.protobuf.ByteString getParentBytes() {
  java.lang.Object ref = parent_;
  if (!(ref instanceof String)) {
    // Already cached in ByteString form.
    return (com.google.protobuf.ByteString) ref;
  }
  // Encode the cached String once and memoize the ByteString form.
  com.google.protobuf.ByteString b =
      com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
  parent_ = b;
  return b;
}

/**
 * Required. The agent to list all entity types from (see
 * {@link #getParent()} for the accepted formats).
 *
 * @param value The parent to set.
 * @return This builder for chaining.
 */
public Builder setParent(java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  parent_ = value;
  bitField0_ |= 0x00000001;
  onChanged();
  return this;
}

/**
 * Resets the parent field to its default value.
 *
 * @return This builder for chaining.
 */
public Builder clearParent() {
  parent_ = getDefaultInstance().getParent();
  bitField0_ &= ~0x00000001;
  onChanged();
  return this;
}

/**
 * Sets the parent field from its UTF-8 byte representation.
 *
 * @param value The bytes for parent to set.
 * @return This builder for chaining.
 */
public Builder setParentBytes(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  checkByteStringIsUtf8(value);
  parent_ = value;
  bitField0_ |= 0x00000001;
  onChanged();
  return this;
}
private java.lang.Object languageCode_ = "";

/**
 * Optional. The language used to access language-specific data. When unset,
 * the agent's default language is used. See
 * https://cloud.google.com/dialogflow/docs/agents-multilingual#intent-entity
 *
 * @return The languageCode.
 */
public java.lang.String getLanguageCode() {
  java.lang.Object ref = languageCode_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  }
  // Decode once and memoize the String form of the cached ByteString.
  com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
  java.lang.String s = bs.toStringUtf8();
  languageCode_ = s;
  return s;
}

/**
 * Optional. The language used to access language-specific data (see
 * {@link #getLanguageCode()}).
 *
 * @return The bytes for languageCode.
 */
public com.google.protobuf.ByteString getLanguageCodeBytes() {
  java.lang.Object ref = languageCode_;
  if (!(ref instanceof String)) {
    return (com.google.protobuf.ByteString) ref;
  }
  com.google.protobuf.ByteString b =
      com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
  languageCode_ = b;
  return b;
}

/**
 * Sets the language_code field.
 *
 * @param value The languageCode to set.
 * @return This builder for chaining.
 */
public Builder setLanguageCode(java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  languageCode_ = value;
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}

/**
 * Resets language_code to its default (empty) value.
 *
 * @return This builder for chaining.
 */
public Builder clearLanguageCode() {
  languageCode_ = getDefaultInstance().getLanguageCode();
  bitField0_ &= ~0x00000002;
  onChanged();
  return this;
}

/**
 * Sets language_code from its UTF-8 byte representation.
 *
 * @param value The bytes for languageCode to set.
 * @return This builder for chaining.
 */
public Builder setLanguageCodeBytes(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  checkByteStringIsUtf8(value);
  languageCode_ = value;
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}
private int pageSize_;

/**
 * Optional. The maximum number of items to return in a single page
 * (100 by default, at most 1000).
 *
 * @return The pageSize.
 */
@java.lang.Override
public int getPageSize() {
  return pageSize_;
}

/**
 * Sets the page_size field.
 *
 * @param value The pageSize to set.
 * @return This builder for chaining.
 */
public Builder setPageSize(int value) {
  pageSize_ = value;
  bitField0_ |= 0x00000004;
  onChanged();
  return this;
}

/**
 * Resets page_size back to 0 (server default applies).
 *
 * @return This builder for chaining.
 */
public Builder clearPageSize() {
  bitField0_ &= ~0x00000004;
  pageSize_ = 0;
  onChanged();
  return this;
}
private java.lang.Object pageToken_ = "";

/**
 * Optional. The next_page_token value returned from a previous list request.
 *
 * @return The pageToken.
 */
public java.lang.String getPageToken() {
  java.lang.Object ref = pageToken_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  }
  // Decode once and memoize the String form of the cached ByteString.
  com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
  java.lang.String s = bs.toStringUtf8();
  pageToken_ = s;
  return s;
}

/**
 * Optional. The next_page_token value returned from a previous list request.
 *
 * @return The bytes for pageToken.
 */
public com.google.protobuf.ByteString getPageTokenBytes() {
  java.lang.Object ref = pageToken_;
  if (!(ref instanceof String)) {
    return (com.google.protobuf.ByteString) ref;
  }
  com.google.protobuf.ByteString b =
      com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
  pageToken_ = b;
  return b;
}

/**
 * Sets the page_token field.
 *
 * @param value The pageToken to set.
 * @return This builder for chaining.
 */
public Builder setPageToken(java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  pageToken_ = value;
  bitField0_ |= 0x00000008;
  onChanged();
  return this;
}

/**
 * Resets page_token to its default (empty) value.
 *
 * @return This builder for chaining.
 */
public Builder clearPageToken() {
  pageToken_ = getDefaultInstance().getPageToken();
  bitField0_ &= ~0x00000008;
  onChanged();
  return this;
}

/**
 * Sets page_token from its UTF-8 byte representation.
 *
 * @param value The bytes for pageToken to set.
 * @return This builder for chaining.
 */
public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  checkByteStringIsUtf8(value);
  pageToken_ = value;
  bitField0_ |= 0x00000008;
  onChanged();
  return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
  // Replaces this builder's unknown-field set wholesale; delegates to the
  // generated-message base class.
  return super.setUnknownFields(unknownFields);
}

@java.lang.Override
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  // Merges (rather than replaces) unknown fields into this builder.
  return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.dialogflow.v2beta1.ListEntityTypesRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.dialogflow.v2beta1.ListEntityTypesRequest)
// Shared singleton returned by getDefaultInstance()/getDefaultInstanceForType().
private static final com.google.cloud.dialogflow.v2beta1.ListEntityTypesRequest DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.cloud.dialogflow.v2beta1.ListEntityTypesRequest();
}

public static com.google.cloud.dialogflow.v2beta1.ListEntityTypesRequest getDefaultInstance() {
  return DEFAULT_INSTANCE;
}

// Wire parser: builds via the message Builder and attaches the partial
// result to any parse exception so callers can inspect what was read.
private static final com.google.protobuf.Parser<ListEntityTypesRequest> PARSER =
    new com.google.protobuf.AbstractParser<ListEntityTypesRequest>() {
      @java.lang.Override
      public ListEntityTypesRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          // Wrap plain I/O failures in the protobuf exception type expected by callers.
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

public static com.google.protobuf.Parser<ListEntityTypesRequest> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<ListEntityTypesRequest> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.cloud.dialogflow.v2beta1.ListEntityTypesRequest getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
|
openjdk/jdk8 | 37,723 | jaxp/src/com/sun/org/apache/xerces/internal/impl/dv/xs/AbstractDateTimeDV.java | /*
* reserved comment block
* DO NOT REMOVE OR ALTER!
*/
/*
* Copyright 1999-2005 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.sun.org.apache.xerces.internal.impl.dv.xs;
import java.math.BigDecimal;
import javax.xml.datatype.DatatypeFactory;
import javax.xml.datatype.Duration;
import javax.xml.datatype.XMLGregorianCalendar;
import com.sun.org.apache.xerces.internal.impl.Constants;
import com.sun.org.apache.xerces.internal.jaxp.datatype.DatatypeFactoryImpl;
import com.sun.org.apache.xerces.internal.xs.datatypes.XSDateTime;
/**
* This is the base class of all date/time datatype validators.
* It implements common code for parsing, validating and comparing datatypes.
* Classes that extend this class, must implement parse() method.
*
* REVISIT: There are many instance variables, which would cause problems
* when we support grammar caching. A grammar is possibly used by
* two parser instances at the same time, then the same simple type
* decl object can be used to validate two strings at the same time.
* -SG
*
* @xerces.internal
*
* @author Elena Litani
* @author Len Berman
* @author Gopal Sharma, SUN Microsystems Inc.
*
* @version $Id: AbstractDateTimeDV.java,v 1.7 2010-11-01 04:39:46 joehw Exp $
*/
public abstract class AbstractDateTimeDV extends TypeValidator {
// Debug tracing switch for the System.out diagnostics used below;
// compiled out when false.
private static final boolean DEBUG = false;
//define shared variables for date/time
//define constants to be used in assigning default values for
//all date/time excluding duration
protected final static int YEAR = 2000;
protected final static int MONTH = 01;
protected final static int DAY = 01;
// Shared factory used by subclasses to construct XMLGregorianCalendar /
// Duration values (see the javax.xml.datatype imports above).
protected static final DatatypeFactory datatypeFactory = new DatatypeFactoryImpl();
@Override
public short getAllowedFacets() {
    // Date/time types accept pattern, whitespace, enumeration and the four
    // ordered-range facets; length-style facets do not apply to them.
    return (XSSimpleTypeDecl.FACET_PATTERN | XSSimpleTypeDecl.FACET_WHITESPACE | XSSimpleTypeDecl.FACET_ENUMERATION | XSSimpleTypeDecl.FACET_MAXINCLUSIVE | XSSimpleTypeDecl.FACET_MININCLUSIVE | XSSimpleTypeDecl.FACET_MAXEXCLUSIVE | XSSimpleTypeDecl.FACET_MINEXCLUSIVE);
}//getAllowedFacets()
/**
 * Distinguishes identity from equality for date/time values: two values
 * representing the same moment in time but carrying different remembered
 * timezones are equal but not identical.
 */
@Override
public boolean isIdentical(Object value1, Object value2) {
    if (value1 instanceof DateTimeData && value2 instanceof DateTimeData) {
        DateTimeData v1 = (DateTimeData) value1;
        DateTimeData v2 = (DateTimeData) value2;
        // Identity additionally requires the original timezone offsets to match.
        boolean sameZone = v1.timezoneHr == v2.timezoneHr && v1.timezoneMin == v2.timezoneMin;
        return sameZone && v1.equals(v2);
    }
    return false;
}//isIdentical()
/**
 * Compares two compiled date/time values (as produced by getActualValue)
 * using the strict ordering of {@link #compareDates}.
 */
@Override
public int compare(Object value1, Object value2) {
    DateTimeData d1 = (DateTimeData) value1;
    DateTimeData d2 = (DateTimeData) value2;
    return compareDates(d1, d2, true);
}//compare()
/**
 * Compare algorithm described in dateTime (3.2.7). Duration datatype
 * overwrites this method.
 *
 * <p>When only one operand carries a timezone, the zoneless operand is
 * compared against the other shifted to both extreme legal offsets
 * (+14:00 and -14:00); if the two extremes disagree on the order, the
 * values are incomparable and INDETERMINATE is returned.</p>
 *
 * @param date1 normalized date representation of the first value
 * @param date2 normalized date representation of the second value
 * @param strict unused in this implementation (honored by subclasses)
 * @return less, greater, less_equal, greater_equal, equal
 */
protected short compareDates(DateTimeData date1, DateTimeData date2, boolean strict) {
    // Same timezone presence: a plain field-by-field comparison suffices.
    if (date1.utc == date2.utc) {
        return compareOrder(date1, date2);
    }
    short c1, c2;

    DateTimeData tempDate = new DateTimeData(null, this);

    if (date1.utc == 'Z') {

        //compare date1 <= (date2 with time zone -14:00)
        //
        cloneDate(date2, tempDate); //clones date2 value to temporary storage: tempDate
        tempDate.timezoneHr = 14;
        tempDate.timezoneMin = 0;
        tempDate.utc = '+';
        normalize(tempDate);
        c1 = compareOrder(date1, tempDate);
        if (c1 == LESS_THAN) {
            return c1;
        }

        //compare date1 >= (date2 with time zone +14:00)
        //
        cloneDate(date2, tempDate); //clones date2 value to temporary storage: tempDate
        tempDate.timezoneHr = -14;
        tempDate.timezoneMin = 0;
        tempDate.utc = '-';
        normalize(tempDate);
        c2 = compareOrder(date1, tempDate);
        if (c2 == GREATER_THAN) {
            return c2;
        }

        // The two extremes disagree: order cannot be determined.
        return INDETERMINATE;
    } else if (date2.utc == 'Z') {

        //compare (date1 with time zone -14:00) <= date2
        //
        cloneDate(date1, tempDate); //clones date1 value to temporary storage: tempDate
        tempDate.timezoneHr = -14;
        tempDate.timezoneMin = 0;
        tempDate.utc = '-';
        if (DEBUG) {
            System.out.println("tempDate=" + dateToString(tempDate));
        }
        normalize(tempDate);
        c1 = compareOrder(tempDate, date2);
        if (DEBUG) {
            System.out.println("date=" + dateToString(date2));
            System.out.println("tempDate=" + dateToString(tempDate));
        }
        if (c1 == LESS_THAN) {
            return c1;
        }

        //compare (date1 with time zone +14:00) >= date2
        //
        cloneDate(date1, tempDate); //clones date1 value to temporary storage: tempDate
        tempDate.timezoneHr = 14;
        tempDate.timezoneMin = 0;
        tempDate.utc = '+';
        normalize(tempDate);
        c2 = compareOrder(tempDate, date2);
        if (DEBUG) {
            System.out.println("tempDate=" + dateToString(tempDate));
        }
        if (c2 == GREATER_THAN) {
            return c2;
        }

        return INDETERMINATE;
    }
    // Neither value is zoned ('Z'): incomparable under this algorithm.
    return INDETERMINATE;

}
/**
 * Given normalized values, determines the order-relation between two
 * date/time objects by comparing components most-significant first,
 * honoring {@code date1.position} which marks the first component that
 * participates in the comparison for partial types (gMonthDay etc.).
 *
 * @param date1 date/time object
 * @param date2 date/time object
 * @return 0 if date1 and date2 are equal, a value less than 0 if date1 is
 * less than date2, a value greater than 0 if date1 is greater than date2
 */
protected short compareOrder(DateTimeData date1, DateTimeData date2) {
    if (date1.position < 1 && date1.year != date2.year) {
        return (short) (date1.year < date2.year ? -1 : 1);
    }
    if (date1.position < 2 && date1.month != date2.month) {
        return (short) (date1.month < date2.month ? -1 : 1);
    }
    if (date1.day != date2.day) {
        return (short) (date1.day < date2.day ? -1 : 1);
    }
    if (date1.hour != date2.hour) {
        return (short) (date1.hour < date2.hour ? -1 : 1);
    }
    if (date1.minute != date2.minute) {
        return (short) (date1.minute < date2.minute ? -1 : 1);
    }
    // Seconds are doubles: keep explicit </> checks so that NaN operands
    // fall through to the utc comparison, exactly as before.
    if (date1.second < date2.second) {
        return -1;
    }
    if (date1.second > date2.second) {
        return 1;
    }
    if (date1.utc != date2.utc) {
        return (short) (date1.utc < date2.utc ? -1 : 1);
    }
    return 0;
}
/**
 * Parses time hh:mm:ss.sss and time zone if any.
 *
 * @param buffer lexical value being parsed
 * @param start  position of the first hour digit
 * @param end    end of the time (plus optional timezone) portion
 * @param data   receives hour, minute, second and timezone
 * @exception RuntimeException on any lexical error
 */
protected void getTime(String buffer, int start, int end, DateTimeData data) throws RuntimeException {

    int stop = start + 2;

    //get hours (hh)
    data.hour = parseInt(buffer, start, stop);

    //get minutes (mm)

    if (buffer.charAt(stop++) != ':') {
        throw new RuntimeException("Error in parsing time zone");
    }
    start = stop;
    stop = stop + 2;
    data.minute = parseInt(buffer, start, stop);

    //get seconds (ss)
    if (buffer.charAt(stop++) != ':') {
        throw new RuntimeException("Error in parsing time zone");
    }

    //find UTC sign if any
    // NOTE: the search starts at the minutes offset; a sign character
    // cannot legally appear before the seconds, so this is equivalent to
    // searching from the seconds — TODO confirm for malformed inputs.
    int sign = findUTCSign(buffer, start, end);

    //get seconds (ms)
    start = stop;
    stop = sign < 0 ? end : sign;
    data.second = parseSecond(buffer, start, stop);

    //parse UTC time zone (hh:mm)
    if (sign > 0) {
        getTimeZone(buffer, data, sign, end);
    }
}
/**
 * Parses date CCYY-MM-DD.
 *
 * @param buffer lexical value being parsed
 * @param start  start position
 * @param end    end position
 * @param date   receives year, month and day
 * @return position right after the DD digits
 * @exception RuntimeException on lexical errors
 */
protected int getDate(String buffer, int start, int end, DateTimeData date) throws RuntimeException {

    // Consume CCYY-MM first; pos lands on the expected '-' before DD.
    int pos = getYearMonth(buffer, start, end, date);

    if (buffer.charAt(pos) != '-') {
        throw new RuntimeException("CCYY-MM must be followed by '-' sign");
    }
    int dayStart = pos + 1;
    int stop = dayStart + 2;
    date.day = parseInt(buffer, dayStart, stop);
    return stop;
}
/**
 * Parses date CCYY-MM.
 *
 * @param buffer lexical value being parsed
 * @param start  start position
 * @param end    end position
 * @param date   receives year and month
 * @return position right after the MM digits
 * @exception RuntimeException on lexical errors
 */
protected int getYearMonth(String buffer, int start, int end, DateTimeData date) throws RuntimeException {

    // Note: the sign check is on index 0 (not start); parseIntYear below
    // also reads from index 0 and handles the optional leading '-' itself.
    if (buffer.charAt(0) == '-') {
        // REVISIT: date starts with preceding '-' sign
        // do we have to do anything with it?
        //
        start++;
    }
    int i = indexOf(buffer, start, end, '-');
    if (i == -1) {
        throw new RuntimeException("Year separator is missing or misplaced");
    }
    int length = i - start;
    if (length < 4) {
        throw new RuntimeException("Year must have 'CCYY' format");
    } else if (length > 4 && buffer.charAt(start) == '0') {
        throw new RuntimeException("Leading zeros are required if the year value would otherwise have fewer than four digits; otherwise they are forbidden");
    }
    date.year = parseIntYear(buffer, i);
    if (buffer.charAt(i) != '-') {
        throw new RuntimeException("CCYY must be followed by '-' sign");
    }
    start = ++i;
    i = start + 2;
    date.month = parseInt(buffer, start, i);
    return i; //fStart points right after the MONTH
}
/**
 * Shared code for the Date and YearMonth datatypes: consumes an optional
 * trailing timezone, rejecting any other trailing characters.
 *
 * @param buffer lexical value being parsed
 * @param start  first unconsumed position (right after the date part)
 * @param end    end of the lexical value
 * @param date   receives the parsed timezone
 * @exception RuntimeException if trailing text is not a timezone
 */
protected void parseTimeZone(String buffer, int start, int end, DateTimeData date) throws RuntimeException {

    // No trailing characters means "no timezone", which is legal.
    if (start >= end) {
        return;
    }
    if (isNextCharUTCSign(buffer, start, end)) {
        getTimeZone(buffer, date, start, end);
    } else {
        throw new RuntimeException("Error in month parsing");
    }
}
/**
 * Parses time zone: 'Z' or {+,-} followed by hh:mm.
 *
 * @param buffer lexical value being parsed
 * @param data   receives utc marker and timezone offset
 * @param sign   index of the timezone's first character ('Z', '+' or '-')
 * @param end    end of the lexical value; the timezone must run to here
 * @exception RuntimeException on any lexical error
 */
protected void getTimeZone(String buffer, DateTimeData data, int sign, int end) throws RuntimeException {
    data.utc = buffer.charAt(sign);

    if (buffer.charAt(sign) == 'Z') {
        // 'Z' must be the final character.
        if (end > (++sign)) {
            throw new RuntimeException("Error in parsing time zone");
        }
        return;
    }
    // "+hh:mm" / "-hh:mm" is exactly 6 characters.
    if (sign <= (end - 6)) {

        int negate = buffer.charAt(sign) == '-' ? -1 : 1;
        //parse hr (sign is pre-incremented past the +/- here)
        int stop = ++sign + 2;
        data.timezoneHr = negate * parseInt(buffer, sign, stop);
        if (buffer.charAt(stop++) != ':') {
            throw new RuntimeException("Error in parsing time zone");
        }

        //parse min
        data.timezoneMin = negate * parseInt(buffer, stop, stop + 2);

        if (stop + 2 != end) {
            throw new RuntimeException("Error in parsing time zone");
        }
        if (data.timezoneHr != 0 || data.timezoneMin != 0) {
            // A non-zero offset means the value still needs normalization.
            data.normalized = false;
        }
    } else {
        throw new RuntimeException("Error in parsing time zone");
    }
    if (DEBUG) {
        System.out.println("time[hh]=" + data.timezoneHr + " time[mm]=" + data.timezoneMin);
    }
}
/**
 * Returns the index of the first occurrence of {@code ch} within
 * {@code buffer[start, end)}, or -1 when absent.
 *
 * @param buffer string to scan
 * @param start  first index to examine (inclusive)
 * @param end    limit of the scan (exclusive)
 * @param ch     character to look for
 * @return index of ch, or -1 if not found in the range
 */
protected int indexOf(String buffer, int start, int end, char ch) {
    int i = start;
    while (i < end) {
        if (ch == buffer.charAt(i)) {
            return i;
        }
        ++i;
    }
    return -1;
}
/**
 * Validates given date/time object according to W3C PR Schema [D.1 ISO 8601
 * Conventions]. Also canonicalizes the legal "24:00:00" notation by
 * rolling the value over to 00:00:00 of the following day (mutating
 * {@code data} in place).
 *
 * @param data date/time object to validate
 */
protected void validateDateTime(DateTimeData data) {

    //REVISIT: should we throw an exception for not valid dates
    //      or reporting an error message should be sufficient?

    /**
     * XML Schema 1.1 - RQ-123: Allow year 0000 in date related types.
     */
    if (!Constants.SCHEMA_1_1_SUPPORT && data.year == 0) {
        throw new RuntimeException("The year \"0000\" is an illegal year value");

    }

    if (data.month < 1 || data.month > 12) {
        throw new RuntimeException("The month must have values 1 to 12");

    }

    //validate days against the actual month/year length
    if (data.day > maxDayInMonthFor(data.year, data.month) || data.day < 1) {
        throw new RuntimeException("The day must have values 1 to 31");
    }

    //validate hours
    if (data.hour > 23 || data.hour < 0) {
        if (data.hour == 24 && data.minute == 0 && data.second == 0) {
            // 24:00:00 is legal and equivalent to 00:00:00 of the next day;
            // roll day/month/year forward, carrying as needed.
            data.hour = 0;
            if (++data.day > maxDayInMonthFor(data.year, data.month)) {
                data.day = 1;
                if (++data.month > 12) {
                    data.month = 1;
                    if (Constants.SCHEMA_1_1_SUPPORT) {
                        ++data.year;
                    } else if (++data.year == 0) {
                        // Schema 1.0 has no year 0: skip from -1 to 1.
                        data.year = 1;
                    }
                }
            }
        } else {
            throw new RuntimeException("Hour must have values 0-23, unless 24:00:00");
        }
    }

    //validate minutes
    if (data.minute > 59 || data.minute < 0) {
        throw new RuntimeException("Minute must have values 0-59");
    }

    //validate seconds (double: fractional seconds allowed, must be < 60)
    if (data.second >= 60 || data.second < 0) {
        throw new RuntimeException("Second must have values 0-59");

    }

    //validate timezone offset: within -14:00 .. +14:00 inclusive
    if (data.timezoneHr > 14 || data.timezoneHr < -14) {
        throw new RuntimeException("Time zone should have range -14:00 to +14:00");
    } else {
        if ((data.timezoneHr == 14 || data.timezoneHr == -14) && data.timezoneMin != 0) {
            throw new RuntimeException("Time zone should have range -14:00 to +14:00");
        } else if (data.timezoneMin > 59 || data.timezoneMin < -59) {
            throw new RuntimeException("Minute must have values 0-59");
        }
    }

}
/**
 * Scans {@code buffer[start, end)} for the first UTC marker character:
 * 'Z', '+' or '-'.
 *
 * @param buffer string to scan
 * @param start  first index to examine (inclusive)
 * @param end    limit of the scan (exclusive)
 * @return index of the UTC character that was found, or -1 if none
 */
protected int findUTCSign(String buffer, int start, int end) {
    for (int i = start; i < end; i++) {
        final char c = buffer.charAt(i);
        switch (c) {
            case 'Z':
            case '+':
            case '-':
                return i;
            default:
                break;
        }
    }
    return -1;
}
/**
 * Returns {@code true} if the character at {@code start} (when within
 * bounds) is a UTC marker: 'Z', '+' or '-'.
 */
protected final boolean isNextCharUTCSign(String buffer, int start, int end) {
    if (start >= end) {
        return false;
    }
    final char c = buffer.charAt(start);
    return c == 'Z' || c == '+' || c == '-';
}
/**
 * Given start and end position, parses string value as a non-negative
 * decimal integer. Accumulates in the negative domain (like
 * Integer.parseInt) so that Integer.MIN_VALUE-adjacent magnitudes do not
 * overflow during accumulation. Assumes {@code start < end}: the do-while
 * reads at least one character.
 *
 * @param buffer string to parse
 * @param start  start position
 * @param end    end position
 * @return integer representation of characters
 * @throws NumberFormatException on a non-digit or overflow
 */
protected int parseInt(String buffer, int start, int end)
        throws NumberFormatException {
    //REVISIT: more testing on this parsing needs to be done.
    int radix = 10;
    int result = 0;
    int digit = 0;
    int limit = -Integer.MAX_VALUE;
    int multmin = limit / radix;
    int i = start;
    do {
        digit = getDigit(buffer.charAt(i));
        if (digit < 0) {
            throw new NumberFormatException("'" + buffer + "' has wrong format");
        }
        if (result < multmin) {
            // Multiplying by the radix would underflow.
            throw new NumberFormatException("'" + buffer + "' has wrong format");
        }
        result *= radix;
        if (result < limit + digit) {
            // Subtracting this digit would underflow.
            throw new NumberFormatException("'" + buffer + "' has wrong format");
        }
        result -= digit;
    } while (++i < end);
    return -result;
}
// Parses the year field (buffer[0, end)) separately from parseInt so that
// a leading '-' (negative year) is supported. Uses the same
// accumulate-in-the-negative-domain trick as parseInt to avoid overflow.
protected int parseIntYear(String buffer, int end) {
    int radix = 10;
    int result = 0;
    boolean negative = false;
    int i = 0;
    int limit;
    int multmin;
    int digit = 0;

    if (buffer.charAt(0) == '-') {
        negative = true;
        limit = Integer.MIN_VALUE;
        i++;
    } else {
        limit = -Integer.MAX_VALUE;
    }
    multmin = limit / radix;
    while (i < end) {
        digit = getDigit(buffer.charAt(i++));
        if (digit < 0) {
            throw new NumberFormatException("'" + buffer + "' has wrong format");
        }
        if (result < multmin) {
            throw new NumberFormatException("'" + buffer + "' has wrong format");
        }
        result *= radix;
        if (result < limit + digit) {
            throw new NumberFormatException("'" + buffer + "' has wrong format");
        }
        result -= digit;
    }

    if (negative) {
        // i > 1 guarantees at least one digit followed the '-' sign.
        if (i > 1) {
            return result;
        } else {
            throw new NumberFormatException("'" + buffer + "' has wrong format");
        }
    }
    return -result;

}
/**
 * If timezone present - normalize dateTime [E Adding durations to
 * dateTimes]: shifts the value to UTC by subtracting the timezone offset
 * and propagating carries through minute, hour, day, month and year.
 * On completion the value is marked zoned ({@code utc = 'Z'}).
 *
 * @param date CCYY-MM-DDThh:mm:ss+03
 */
protected void normalize(DateTimeData date) {

    // REVISIT: we have common code in addDuration() for durations
    //          should consider reorganizing it.
    //

    //add minutes (from time zone); negate because we subtract the offset
    int negate = -1;

    if (DEBUG) {
        System.out.println("==>date.minute" + date.minute);
        System.out.println("==>date.timezoneMin" + date.timezoneMin);
    }
    int temp = date.minute + negate * date.timezoneMin;
    int carry = fQuotient(temp, 60);
    date.minute = mod(temp, 60, carry);

    if (DEBUG) {
        System.out.println("==>carry: " + carry);
    }
    //add hours
    temp = date.hour + negate * date.timezoneHr + carry;
    carry = fQuotient(temp, 24);
    date.hour = mod(temp, 24, carry);
    if (DEBUG) {
        System.out.println("==>date.hour" + date.hour);
        System.out.println("==>carry: " + carry);
    }

    date.day = date.day + carry;

    // Propagate the day carry through months (and possibly years) until
    // the day falls within the current month.
    while (true) {
        temp = maxDayInMonthFor(date.year, date.month);
        if (date.day < 1) {
            date.day = date.day + maxDayInMonthFor(date.year, date.month - 1);
            carry = -1;
        } else if (date.day > temp) {
            date.day = date.day - temp;
            carry = 1;
        } else {
            break;
        }
        temp = date.month + carry;
        date.month = modulo(temp, 1, 13);
        date.year = date.year + fQuotient(temp, 1, 13);
        if (date.year == 0 && !Constants.SCHEMA_1_1_SUPPORT) {
            // Schema 1.0 has no year 0: step over it in the carry direction.
            date.year = (date.timezoneHr < 0 || date.timezoneMin < 0) ? 1 : -1;
        }
    }
    date.utc = 'Z';
}
/**
 * Snapshots the current (pre-normalization) field values into the
 * {@code unNorm*} fields so the original lexical components survive
 * {@link #normalize}.
 *
 * @param date date/time whose components are saved
 */
protected void saveUnnormalized(DateTimeData date) {
    date.unNormYear = date.year;
    date.unNormMonth = date.month;
    date.unNormDay = date.day;
    date.unNormHour = date.hour;
    date.unNormMinute = date.minute;
    date.unNormSecond = date.second;
}
/**
 * Resets object representation of date/time: zeroes every component,
 * including the utc marker and timezone offset, prior to a fresh parse.
 *
 * @param data date/time object
 */
protected void resetDateObj(DateTimeData data) {
    data.year = 0;
    data.month = 0;
    data.day = 0;
    data.hour = 0;
    data.minute = 0;
    data.second = 0;
    data.utc = 0;
    data.timezoneHr = 0;
    data.timezoneMin = 0;
}
/**
 * Given {year, month} computes the maximum number of days for that month,
 * applying the Gregorian leap-year rule for February.
 *
 * @param year  (possibly negative) year
 * @param month month in 1..12
 * @return number of days in the given month (28, 29, 30 or 31)
 */
protected int maxDayInMonthFor(int year, int month) {
    switch (month) {
        case 4:
        case 6:
        case 9:
        case 11:
            return 30;
        case 2:
            return isLeapYear(year) ? 29 : 28;
        default:
            return 31;
    }
}
private boolean isLeapYear(int year) {
    //REVISIT: should we take care about Julian calendar?
    // Gregorian rule: every 400th year is a leap year; otherwise every
    // 4th year except century years.
    if (year % 400 == 0) {
        return true;
    }
    return (year % 4 == 0) && (year % 100 != 0);
}
//
// help function described in W3C PR Schema [E Adding durations to dateTimes]
// Computes modulo(a, b) given a precomputed floor quotient:
// modulo(a, b) = a - fQuotient(a, b) * b
//
protected int mod(int a, int b, int quotient) {
    //modulo(a, b) = a - fQuotient(a,b)*b
    return (a - quotient * b);
}
//
// help function described in W3C PR Schema [E Adding durations to dateTimes]
//
/**
 * fQuotient(a, b) = the greatest integer less than or equal to a/b.
 *
 * <p>Implemented with {@link Math#floorDiv(int, int)}, which is exact in
 * integer arithmetic. The previous implementation computed
 * {@code (int) Math.floor((float) a / b)}: a float mantissa holds only 24
 * bits, so for |a| &gt;= 2^24 the division could round and yield an
 * off-by-one quotient.</p>
 *
 * <p>Note: throws ArithmeticException for b == 0; callers in this class
 * pass fixed non-zero divisors (60, 24, 12).</p>
 */
protected int fQuotient(int a, int b) {
    return Math.floorDiv(a, b);
}
//
// help function described in W3C PR Schema [E Adding durations to dateTimes]
// Wraps temp into the half-open interval [low, high):
// modulo(a - low, high - low) + low
//
protected int modulo(int temp, int low, int high) {
    //modulo(a - low, high - low) + low
    int a = temp - low;
    int b = high - low;
    return (mod(a, b, fQuotient(a, b)) + low);
}
//
// help function described in W3C PR Schema [E Adding durations to dateTimes]
// Floor quotient of temp relative to the interval [low, high):
// fQuotient(a - low, high - low)
//
protected int fQuotient(int temp, int low, int high) {
    //fQuotient(a - low, high - low)
    return fQuotient(temp - low, high - low);
}
/**
 * Renders a date/time object as CCYY-MM-DDThh:mm:ss[.fff][Z|+|-], mainly
 * for debugging output. The utc component is appended as a raw character
 * (or omitted when 0) by the {@code append(..., nch=0)} overload.
 */
protected String dateToString(DateTimeData date) {
    StringBuffer message = new StringBuffer(25);
    append(message, date.year, 4);
    message.append('-');
    append(message, date.month, 2);
    message.append('-');
    append(message, date.day, 2);
    message.append('T');
    append(message, date.hour, 2);
    message.append(':');
    append(message, date.minute, 2);
    message.append(':');
    append(message, date.second);
    append(message, (char) date.utc, 0);
    return message.toString();
}
/**
 * Appends an int to the buffer, zero-padded to {@code nch} digits (4 or 2);
 * with {@code nch == 0} the value is appended as a raw character, and a
 * value of 0 is appended as nothing (used for an absent utc marker).
 */
protected final void append(StringBuffer message, int value, int nch) {
    if (value == Integer.MIN_VALUE) {
        // -Integer.MIN_VALUE would overflow below; print directly.
        message.append(value);
        return;
    }
    if (value < 0) {
        message.append('-');
        value = -value;
    }
    if (nch == 4) {
        if (value < 10) {
            message.append("000");
        } else if (value < 100) {
            message.append("00");
        } else if (value < 1000) {
            message.append('0');
        }
        message.append(value);
    } else if (nch == 2) {
        if (value < 10) {
            message.append('0');
        }
        message.append(value);
    } else {
        if (value != 0) {
            message.append((char) value);
        }
    }
}
/**
 * Appends a (seconds) double with a leading sign and at least two integer
 * digits (zero-padded), delegating the numeric rendering to append2.
 */
protected final void append(StringBuffer message, double value) {
    if (value < 0) {
        message.append('-');
        value = -value;
    }
    if (value < 10) {
        message.append('0');
    }
    append2(message, value);
}
/**
 * Appends a double: whole values print as plain integers (no ".0"),
 * fractional values are expanded by append3.
 */
protected final void append2(StringBuffer message, double value) {
    final int intValue = (int) value;
    if (value == intValue) {
        message.append(intValue);
    } else {
        append3(message, value);
    }
}
/**
 * Appends a double in plain (non-scientific) decimal notation, expanding
 * the exponent form that String.valueOf(double) produces for very small
 * (n.nnnE-N, N >= 4) and very large (n.nnnEN, N >= 7) magnitudes.
 */
private void append3(StringBuffer message, double value) {
    String d = String.valueOf(value);
    int eIndex = d.indexOf('E');
    if (eIndex == -1) {
        // Already in plain decimal notation.
        message.append(d);
        return;
    }
    int exp;
    if (value < 1) {
        // Need to convert from scientific notation of the form
        // n.nnn...E-N (N >= 4) to a normal decimal value.
        try {
            // eIndex + 2 skips the 'E' and the '-' sign.
            exp = parseInt(d, eIndex + 2, d.length());
        } // This should never happen.
        // It's only possible if String.valueOf(double) is broken.
        catch (Exception e) {
            message.append(d);
            return;
        }
        message.append("0.");
        for (int i = 1; i < exp; ++i) {
            message.append('0');
        }
        // Remove trailing zeros.
        int end = eIndex - 1;
        while (end > 0) {
            char c = d.charAt(end);
            if (c != '0') {
                break;
            }
            --end;
        }
        // Now append the digits to the end. Skip over the decimal point.
        for (int i = 0; i <= end; ++i) {
            char c = d.charAt(i);
            if (c != '.') {
                message.append(c);
            }
        }
    } else {
        // Need to convert from scientific notation of the form
        // n.nnn...EN (N >= 7) to a normal decimal value.
        try {
            exp = parseInt(d, eIndex + 1, d.length());
        } // This should never happen.
        // It's only possible if String.valueOf(double) is broken.
        catch (Exception e) {
            message.append(d);
            return;
        }
        // The mantissa is "n.nnn": exp + 2 is where the decimal point
        // belongs in the expanded digit string.
        final int integerEnd = exp + 2;
        for (int i = 0; i < eIndex; ++i) {
            char c = d.charAt(i);
            if (c != '.') {
                if (i == integerEnd) {
                    message.append('.');
                }
                message.append(c);
            }
        }
        // Append trailing zeroes if necessary.
        for (int i = integerEnd - eIndex; i > 0; --i) {
            message.append('0');
        }
    }
}
/**
 * Parses the seconds component "ss" or "ss.fff" from
 * {@code buffer[start, end)}: exactly two integer digits, an optional
 * decimal point, and at least one fractional digit when the point is
 * present.
 *
 * @param buffer lexical value being parsed
 * @param start  start position
 * @param end    end position
 * @return the seconds value as a double
 * @throws NumberFormatException on any lexical error
 */
protected double parseSecond(String buffer, int start, int end)
        throws NumberFormatException {
    int dot = -1;
    for (int i = start; i < end; i++) {
        final char ch = buffer.charAt(i);
        if (ch == '.') {
            dot = i;
        } else if (ch < '0' || ch > '9') {
            throw new NumberFormatException("'" + buffer + "' has wrong format");
        }
    }
    // Without a dot: exactly two digits. With a dot: two digits before it
    // and at least one digit after it.
    final boolean badPlain = (dot == -1) && (start + 2 != end);
    final boolean badFraction = (dot != -1) && (start + 2 != dot || dot + 1 == end);
    if (badPlain || badFraction) {
        throw new NumberFormatException("'" + buffer + "' has wrong format");
    }
    return Double.parseDouble(buffer.substring(start, end));
}
//
//Private help functions
//
/**
 * Copies every date/time component of {@code src} into {@code dest}.
 *
 * <p>Only the normalized fields and the timezone information are copied;
 * the un-normalized fields, {@code position} and the {@code normalized}
 * flag are left untouched. Used by {@code DateTimeData.hashCode()} to
 * build a scratch copy that can be normalized without mutating the
 * original instance.</p>
 *
 * @param src  the instance whose fields are read
 * @param dest the instance whose fields are overwritten
 */
private void cloneDate(DateTimeData src, DateTimeData dest) {
    dest.year = src.year;
    dest.month = src.month;
    dest.day = src.day;
    dest.hour = src.hour;
    dest.minute = src.minute;
    dest.second = src.second;
    dest.utc = src.utc;
    dest.timezoneHr = src.timezoneHr;
    dest.timezoneMin = src.timezoneMin;
}
/**
 * Mutable holder for the components of an XML Schema date/time or
 * duration value, as produced by the {@code AbstractDateTimeDV} parsers.
 *
 * <p>Two parallel sets of component fields are kept: the normalized
 * (UTC-adjusted) components ({@code year} .. {@code second}) and the
 * original, un-normalized components ({@code unNormYear} ..
 * {@code unNormSecond}). The {@code normalized} flag selects which set
 * the {@code getXxx()} accessors report.</p>
 *
 * <p>Instances are mutable, so {@link #hashCode()} cannot be cached; it
 * is kept consistent with {@link #equals(Object)} by converting a scratch
 * copy to UTC before hashing.</p>
 */
static final class DateTimeData implements XSDateTime {
    // Normalized (UTC-adjusted) components. 'utc' holds the timezone
    // indicator ('Z', '+', '-') or 0 when no timezone was present.
    int year, month, day, hour, minute, utc;
    double second;
    // Timezone offset parsed from the lexical value.
    int timezoneHr, timezoneMin;
    // Lexical form this value was parsed from; may be null for internally
    // created scratch copies (see hashCode()). Returned by getLexicalValue().
    private String originalValue;
    boolean normalized = true;
    // Original (pre-normalization) components; meaningful only when
    // normalized == false.
    int unNormYear;
    int unNormMonth;
    int unNormDay;
    int unNormHour;
    int unNormMinute;
    double unNormSecond;
    // used for comparisons - to decide the 'interesting' portions of
    // a date/time based data type.
    int position;
    // a pointer to the type that was used to generate this data
    // note that this is not the actual simple type, but one of the
    // statically created XXXDV objects, so this won't cause any GC problem.
    final AbstractDateTimeDV type;
    // Cached canonical string form, computed lazily by toString().
    private volatile String canonical;

    /**
     * Creates an empty instance carrying only the lexical value and the
     * owning DV; the component fields are filled in by the parser.
     */
    public DateTimeData(String originalValue, AbstractDateTimeDV type) {
        this.originalValue = originalValue;
        this.type = type;
    }

    /**
     * Creates a fully populated instance (used by {@link #clone()}).
     */
    public DateTimeData(int year, int month, int day, int hour, int minute,
            double second, int utc, String originalValue, boolean normalized, AbstractDateTimeDV type) {
        this.year = year;
        this.month = month;
        this.day = day;
        this.hour = hour;
        this.minute = minute;
        this.second = second;
        this.utc = utc;
        // FIX: the 'normalized' argument was previously dropped, so
        // clone() always produced an instance reporting normalized == true
        // even when cloning an un-normalized value, making isNormalized()
        // and the getXxx() accessors lie on clones.
        this.normalized = normalized;
        this.type = type;
        this.originalValue = originalValue;
    }

    /**
     * Two values are equal when the owning DV's order relation places
     * them at the same point in time (strict comparison).
     */
    @Override
    public boolean equals(Object obj) {
        if (!(obj instanceof DateTimeData)) {
            return false;
        }
        return type.compareDates(this, (DateTimeData) obj, true) == 0;
    }

    // If two DateTimeData are equals - then they should have the same
    // hashcode. This means we need to convert the date to UTC before
    // we return its hashcode.
    // The DateTimeData is unfortunately mutable - so we cannot
    // cache the result of the conversion...
    //
    @Override
    public int hashCode() {
        final DateTimeData tempDate = new DateTimeData(null, type);
        type.cloneDate(this, tempDate);
        type.normalize(tempDate);
        return type.dateToString(tempDate).hashCode();
    }

    /** Returns (and lazily caches) the canonical string form. */
    @Override
    public String toString() {
        if (canonical == null) {
            canonical = type.dateToString(this);
        }
        return canonical;
    }

    /* (non-Javadoc)
     * @see org.apache.xerces.xs.datatypes.XSDateTime#getYear()
     */
    @Override
    public int getYears() {
        // Durations expose their year component via getMonths() instead.
        if (type instanceof DurationDV) {
            return 0;
        }
        return normalized ? year : unNormYear;
    }

    /* (non-Javadoc)
     * @see org.apache.xerces.xs.datatypes.XSDateTime#getMonth()
     */
    @Override
    public int getMonths() {
        if (type instanceof DurationDV) {
            // For durations this returns the total months (years folded in).
            return year * 12 + month;
        }
        return normalized ? month : unNormMonth;
    }

    /* (non-Javadoc)
     * @see org.apache.xerces.xs.datatypes.XSDateTime#getDay()
     */
    @Override
    public int getDays() {
        // Durations fold days into getSeconds().
        if (type instanceof DurationDV) {
            return 0;
        }
        return normalized ? day : unNormDay;
    }

    /* (non-Javadoc)
     * @see org.apache.xerces.xs.datatypes.XSDateTime#getHour()
     */
    @Override
    public int getHours() {
        // Durations fold hours into getSeconds().
        if (type instanceof DurationDV) {
            return 0;
        }
        return normalized ? hour : unNormHour;
    }

    /* (non-Javadoc)
     * @see org.apache.xerces.xs.datatypes.XSDateTime#getMinutes()
     */
    @Override
    public int getMinutes() {
        // Durations fold minutes into getSeconds().
        if (type instanceof DurationDV) {
            return 0;
        }
        return normalized ? minute : unNormMinute;
    }

    /* (non-Javadoc)
     * @see org.apache.xerces.xs.datatypes.XSDateTime#getSeconds()
     */
    @Override
    public double getSeconds() {
        if (type instanceof DurationDV) {
            // Total seconds with days/hours/minutes folded in.
            // NOTE(review): the day/hour/minute products are computed in
            // int arithmetic before widening; extremely large durations
            // could overflow - confirm the parser's range guarantees.
            return day * 24 * 60 * 60 + hour * 60 * 60 + minute * 60 + second;
        }
        return normalized ? second : unNormSecond;
    }

    /* (non-Javadoc)
     * @see org.apache.xerces.xs.datatypes.XSDateTime#hasTimeZone()
     */
    @Override
    public boolean hasTimeZone() {
        // utc == 0 means no timezone indicator was present in the lexical value.
        return utc != 0;
    }

    /* (non-Javadoc)
     * @see org.apache.xerces.xs.datatypes.XSDateTime#getTimeZoneHours()
     */
    @Override
    public int getTimeZoneHours() {
        return timezoneHr;
    }

    /* (non-Javadoc)
     * @see org.apache.xerces.xs.datatypes.XSDateTime#getTimeZoneMinutes()
     */
    @Override
    public int getTimeZoneMinutes() {
        return timezoneMin;
    }

    /* (non-Javadoc)
     * @see org.apache.xerces.xs.datatypes.XSDateTime#getLexicalValue()
     */
    @Override
    public String getLexicalValue() {
        return originalValue;
    }

    /* (non-Javadoc)
     * @see org.apache.xerces.xs.datatypes.XSDateTime#normalize()
     */
    @Override
    public XSDateTime normalize() {
        if (!normalized) {
            // Return a copy flagged as normalized; the normalized component
            // fields are already maintained alongside the unNorm* ones.
            DateTimeData dt = (DateTimeData) this.clone();
            dt.normalized = true;
            return dt;
        }
        return this;
    }

    /* (non-Javadoc)
     * @see org.apache.xerces.xs.datatypes.XSDateTime#isNormalized()
     */
    @Override
    public boolean isNormalized() {
        return normalized;
    }

    /** Field-by-field copy; the owning DV reference is shared (it is stateless). */
    @Override
    public Object clone() {
        DateTimeData dt = new DateTimeData(this.year, this.month, this.day, this.hour,
                this.minute, this.second, this.utc, this.originalValue, this.normalized, this.type);
        dt.canonical = this.canonical;
        dt.position = position;
        dt.timezoneHr = this.timezoneHr;
        dt.timezoneMin = this.timezoneMin;
        dt.unNormYear = this.unNormYear;
        dt.unNormMonth = this.unNormMonth;
        dt.unNormDay = this.unNormDay;
        dt.unNormHour = this.unNormHour;
        dt.unNormMinute = this.unNormMinute;
        dt.unNormSecond = this.unNormSecond;
        return dt;
    }

    /* (non-Javadoc)
     * @see org.apache.xerces.xs.datatypes.XSDateTime#getXMLGregorianCalendar()
     */
    @Override
    public XMLGregorianCalendar getXMLGregorianCalendar() {
        // Delegates to the owning DV, which knows the concrete calendar type.
        return type.getXMLGregorianCalendar(this);
    }

    /* (non-Javadoc)
     * @see org.apache.xerces.xs.datatypes.XSDateTime#getDuration()
     */
    @Override
    public Duration getDuration() {
        // Delegates to the owning DV; non-duration DVs return null.
        return type.getDuration(this);
    }
}
/**
 * Converts the given internal representation to an
 * {@link XMLGregorianCalendar}. This base implementation returns
 * {@code null}; presumably overridden by calendar-based DV subclasses -
 * confirm against the concrete DV implementations.
 *
 * @param data the parsed date/time components
 * @return the calendar representation, or {@code null} if unsupported
 */
protected XMLGregorianCalendar getXMLGregorianCalendar(DateTimeData data) {
return null;
}
/**
 * Converts the given internal representation to a {@link Duration}.
 * This base implementation returns {@code null}; presumably overridden
 * by duration DV subclasses - confirm against the concrete DV
 * implementations.
 *
 * @param data the parsed duration components
 * @return the duration representation, or {@code null} if unsupported
 */
protected Duration getDuration(DateTimeData data) {
return null;
}
/**
 * Extracts the fractional part of the un-normalized seconds value as a
 * {@link BigDecimal} (e.g. a seconds value of 5.25 yields 0.25).
 *
 * @param data the date/time whose {@code unNormSecond} field is inspected
 * @return the fractional seconds, or {@code null} when the seconds value
 *         has no decimal point or the fraction is exactly zero
 */
protected final BigDecimal getFractionalSecondsAsBigDecimal(DateTimeData data) {
    // append3 renders the seconds in the type's canonical lexical form.
    final StringBuffer buf = new StringBuffer();
    append3(buf, data.unNormSecond);
    final String value = buf.toString();
    final int index = value.indexOf('.');
    if (index == -1) {
        // No decimal point: the seconds value is integral.
        return null;
    }
    // Keep the substring starting at '.', e.g. ".250" -> 0.250.
    final BigDecimal fraction = new BigDecimal(value.substring(index));
    // signum() == 0 is the allocation-free equivalent of
    // compareTo(BigDecimal.ZERO) == 0.
    if (fraction.signum() == 0) {
        return null;
    }
    return fraction;
}
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/discoveryengine/v1/control_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.discoveryengine.v1;
/**
*
*
* <pre>
* Request for ListControls method.
* </pre>
*
* Protobuf type {@code google.cloud.discoveryengine.v1.ListControlsRequest}
*/
public final class ListControlsRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.discoveryengine.v1.ListControlsRequest)
ListControlsRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListControlsRequest.newBuilder() to construct.
private ListControlsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListControlsRequest() {
parent_ = "";
pageToken_ = "";
filter_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListControlsRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.discoveryengine.v1.ControlServiceProto
.internal_static_google_cloud_discoveryengine_v1_ListControlsRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.discoveryengine.v1.ControlServiceProto
.internal_static_google_cloud_discoveryengine_v1_ListControlsRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.discoveryengine.v1.ListControlsRequest.class,
com.google.cloud.discoveryengine.v1.ListControlsRequest.Builder.class);
}
public static final int PARENT_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The data store resource name. Format:
* `projects/{project}/locations/{location}/collections/{collection_id}/dataStores/{data_store_id}`
* or
* `projects/{project}/locations/{location}/collections/{collection_id}/engines/{engine_id}`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
@java.lang.Override
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. The data store resource name. Format:
* `projects/{project}/locations/{location}/collections/{collection_id}/dataStores/{data_store_id}`
* or
* `projects/{project}/locations/{location}/collections/{collection_id}/engines/{engine_id}`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int PAGE_SIZE_FIELD_NUMBER = 2;
private int pageSize_ = 0;
/**
*
*
* <pre>
* Optional. Maximum number of results to return. If unspecified, defaults
* to 50. Max allowed value is 1000.
* </pre>
*
* <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
public static final int PAGE_TOKEN_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private volatile java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* Optional. A page token, received from a previous `ListControls` call.
* Provide this to retrieve the subsequent page.
* </pre>
*
* <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The pageToken.
*/
@java.lang.Override
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* Optional. A page token, received from a previous `ListControls` call.
* Provide this to retrieve the subsequent page.
* </pre>
*
* <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for pageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int FILTER_FIELD_NUMBER = 4;
@SuppressWarnings("serial")
private volatile java.lang.Object filter_ = "";
/**
*
*
* <pre>
* Optional. A filter to apply on the list results. Supported features:
*
* * List all the products under the parent branch if
* [filter][google.cloud.discoveryengine.v1.ListControlsRequest.filter] is
* unset. Currently this field is unsupported.
* </pre>
*
* <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The filter.
*/
@java.lang.Override
public java.lang.String getFilter() {
java.lang.Object ref = filter_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
filter_ = s;
return s;
}
}
/**
*
*
* <pre>
* Optional. A filter to apply on the list results. Supported features:
*
* * List all the products under the parent branch if
* [filter][google.cloud.discoveryengine.v1.ListControlsRequest.filter] is
* unset. Currently this field is unsupported.
* </pre>
*
* <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for filter.
*/
@java.lang.Override
public com.google.protobuf.ByteString getFilterBytes() {
java.lang.Object ref = filter_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
filter_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
}
if (pageSize_ != 0) {
output.writeInt32(2, pageSize_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 4, filter_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
}
if (pageSize_ != 0) {
size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, filter_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.discoveryengine.v1.ListControlsRequest)) {
return super.equals(obj);
}
com.google.cloud.discoveryengine.v1.ListControlsRequest other =
(com.google.cloud.discoveryengine.v1.ListControlsRequest) obj;
if (!getParent().equals(other.getParent())) return false;
if (getPageSize() != other.getPageSize()) return false;
if (!getPageToken().equals(other.getPageToken())) return false;
if (!getFilter().equals(other.getFilter())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + PARENT_FIELD_NUMBER;
hash = (53 * hash) + getParent().hashCode();
hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
hash = (53 * hash) + getPageSize();
hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getPageToken().hashCode();
hash = (37 * hash) + FILTER_FIELD_NUMBER;
hash = (53 * hash) + getFilter().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.discoveryengine.v1.ListControlsRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.discoveryengine.v1.ListControlsRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.discoveryengine.v1.ListControlsRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.discoveryengine.v1.ListControlsRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.discoveryengine.v1.ListControlsRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.discoveryengine.v1.ListControlsRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.discoveryengine.v1.ListControlsRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.discoveryengine.v1.ListControlsRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.discoveryengine.v1.ListControlsRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.discoveryengine.v1.ListControlsRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.discoveryengine.v1.ListControlsRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.discoveryengine.v1.ListControlsRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.discoveryengine.v1.ListControlsRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request for ListControls method.
* </pre>
*
* Protobuf type {@code google.cloud.discoveryengine.v1.ListControlsRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.discoveryengine.v1.ListControlsRequest)
com.google.cloud.discoveryengine.v1.ListControlsRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.discoveryengine.v1.ControlServiceProto
.internal_static_google_cloud_discoveryengine_v1_ListControlsRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.discoveryengine.v1.ControlServiceProto
.internal_static_google_cloud_discoveryengine_v1_ListControlsRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.discoveryengine.v1.ListControlsRequest.class,
com.google.cloud.discoveryengine.v1.ListControlsRequest.Builder.class);
}
// Construct using com.google.cloud.discoveryengine.v1.ListControlsRequest.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
parent_ = "";
pageSize_ = 0;
pageToken_ = "";
filter_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.discoveryengine.v1.ControlServiceProto
.internal_static_google_cloud_discoveryengine_v1_ListControlsRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.discoveryengine.v1.ListControlsRequest getDefaultInstanceForType() {
return com.google.cloud.discoveryengine.v1.ListControlsRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.discoveryengine.v1.ListControlsRequest build() {
com.google.cloud.discoveryengine.v1.ListControlsRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.discoveryengine.v1.ListControlsRequest buildPartial() {
com.google.cloud.discoveryengine.v1.ListControlsRequest result =
new com.google.cloud.discoveryengine.v1.ListControlsRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.discoveryengine.v1.ListControlsRequest result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.parent_ = parent_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.pageSize_ = pageSize_;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.pageToken_ = pageToken_;
}
if (((from_bitField0_ & 0x00000008) != 0)) {
result.filter_ = filter_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.discoveryengine.v1.ListControlsRequest) {
return mergeFrom((com.google.cloud.discoveryengine.v1.ListControlsRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.discoveryengine.v1.ListControlsRequest other) {
if (other == com.google.cloud.discoveryengine.v1.ListControlsRequest.getDefaultInstance())
return this;
if (!other.getParent().isEmpty()) {
parent_ = other.parent_;
bitField0_ |= 0x00000001;
onChanged();
}
if (other.getPageSize() != 0) {
setPageSize(other.getPageSize());
}
if (!other.getPageToken().isEmpty()) {
pageToken_ = other.pageToken_;
bitField0_ |= 0x00000004;
onChanged();
}
if (!other.getFilter().isEmpty()) {
filter_ = other.filter_;
bitField0_ |= 0x00000008;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
parent_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 16:
{
pageSize_ = input.readInt32();
bitField0_ |= 0x00000002;
break;
} // case 16
case 26:
{
pageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000004;
break;
} // case 26
case 34:
{
filter_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000008;
break;
} // case 34
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The data store resource name. Format:
* `projects/{project}/locations/{location}/collections/{collection_id}/dataStores/{data_store_id}`
* or
* `projects/{project}/locations/{location}/collections/{collection_id}/engines/{engine_id}`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The data store resource name. Format:
* `projects/{project}/locations/{location}/collections/{collection_id}/dataStores/{data_store_id}`
* or
* `projects/{project}/locations/{location}/collections/{collection_id}/engines/{engine_id}`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The data store resource name. Format:
* `projects/{project}/locations/{location}/collections/{collection_id}/dataStores/{data_store_id}`
* or
* `projects/{project}/locations/{location}/collections/{collection_id}/engines/{engine_id}`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The parent to set.
* @return This builder for chaining.
*/
public Builder setParent(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The data store resource name. Format:
* `projects/{project}/locations/{location}/collections/{collection_id}/dataStores/{data_store_id}`
* or
* `projects/{project}/locations/{location}/collections/{collection_id}/engines/{engine_id}`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearParent() {
parent_ = getDefaultInstance().getParent();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The data store resource name. Format:
* `projects/{project}/locations/{location}/collections/{collection_id}/dataStores/{data_store_id}`
* or
* `projects/{project}/locations/{location}/collections/{collection_id}/engines/{engine_id}`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for parent to set.
* @return This builder for chaining.
*/
public Builder setParentBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private int pageSize_;
/**
*
*
* <pre>
* Optional. Maximum number of results to return. If unspecified, defaults
* to 50. Max allowed value is 1000.
* </pre>
*
* <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
/**
*
*
* <pre>
* Optional. Maximum number of results to return. If unspecified, defaults
* to 50. Max allowed value is 1000.
* </pre>
*
* <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The pageSize to set.
* @return This builder for chaining.
*/
public Builder setPageSize(int value) {
pageSize_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. Maximum number of results to return. If unspecified, defaults
* to 50. Max allowed value is 1000.
* </pre>
*
* <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
public Builder clearPageSize() {
bitField0_ = (bitField0_ & ~0x00000002);
pageSize_ = 0;
onChanged();
return this;
}
private java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* Optional. A page token, received from a previous `ListControls` call.
* Provide this to retrieve the subsequent page.
* </pre>
*
* <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The pageToken.
*/
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Optional. A page token, received from a previous `ListControls` call.
* Provide this to retrieve the subsequent page.
* </pre>
*
* <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for pageToken.
*/
public com.google.protobuf.ByteString getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Optional. A page token, received from a previous `ListControls` call.
* Provide this to retrieve the subsequent page.
* </pre>
*
* <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
pageToken_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. A page token, received from a previous `ListControls` call.
* Provide this to retrieve the subsequent page.
* </pre>
*
* <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
public Builder clearPageToken() {
pageToken_ = getDefaultInstance().getPageToken();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. A page token, received from a previous `ListControls` call.
* Provide this to retrieve the subsequent page.
* </pre>
*
* <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The bytes for pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
pageToken_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
private java.lang.Object filter_ = "";
/**
*
*
* <pre>
* Optional. A filter to apply on the list results. Supported features:
*
* * List all the products under the parent branch if
* [filter][google.cloud.discoveryengine.v1.ListControlsRequest.filter] is
* unset. Currently this field is unsupported.
* </pre>
*
* <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The filter.
*/
public java.lang.String getFilter() {
java.lang.Object ref = filter_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
filter_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Optional. A filter to apply on the list results. Supported features:
*
* * List all the products under the parent branch if
* [filter][google.cloud.discoveryengine.v1.ListControlsRequest.filter] is
* unset. Currently this field is unsupported.
* </pre>
*
* <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for filter.
*/
public com.google.protobuf.ByteString getFilterBytes() {
java.lang.Object ref = filter_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
filter_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Optional. A filter to apply on the list results. Supported features:
*
* * List all the products under the parent branch if
* [filter][google.cloud.discoveryengine.v1.ListControlsRequest.filter] is
* unset. Currently this field is unsupported.
* </pre>
*
* <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The filter to set.
* @return This builder for chaining.
*/
public Builder setFilter(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
filter_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. A filter to apply on the list results. Supported features:
*
* * List all the products under the parent branch if
* [filter][google.cloud.discoveryengine.v1.ListControlsRequest.filter] is
* unset. Currently this field is unsupported.
* </pre>
*
* <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
public Builder clearFilter() {
filter_ = getDefaultInstance().getFilter();
bitField0_ = (bitField0_ & ~0x00000008);
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. A filter to apply on the list results. Supported features:
*
* * List all the products under the parent branch if
* [filter][google.cloud.discoveryengine.v1.ListControlsRequest.filter] is
* unset. Currently this field is unsupported.
* </pre>
*
* <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The bytes for filter to set.
* @return This builder for chaining.
*/
public Builder setFilterBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
filter_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
    // Final overrides: unknown-field handling is delegated unchanged to the
    // generated superclass so unrecognized fields survive a parse/serialize
    // round trip.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.discoveryengine.v1.ListControlsRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.discoveryengine.v1.ListControlsRequest)
  // Singleton default instance, created eagerly at class-initialization time.
  private static final com.google.cloud.discoveryengine.v1.ListControlsRequest DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.discoveryengine.v1.ListControlsRequest();
  }

  public static com.google.cloud.discoveryengine.v1.ListControlsRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Parser that builds a message from wire bytes. On failure, whatever was
  // parsed so far is attached to the thrown InvalidProtocolBufferException as
  // the "unfinished message" so callers can inspect partial results.
  private static final com.google.protobuf.Parser<ListControlsRequest> PARSER =
      new com.google.protobuf.AbstractParser<ListControlsRequest>() {
        @java.lang.Override
        public ListControlsRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O failures in the protobuf exception type callers expect.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<ListControlsRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<ListControlsRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.discoveryengine.v1.ListControlsRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 37,783 | java-analytics-admin/proto-google-analytics-admin-v1alpha/src/main/java/com/google/analytics/admin/v1alpha/UpdateConversionEventRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/analytics/admin/v1alpha/analytics_admin.proto
// Protobuf Java Version: 3.25.8
package com.google.analytics.admin.v1alpha;
/**
*
*
* <pre>
* Request message for UpdateConversionEvent RPC
* </pre>
*
* Protobuf type {@code google.analytics.admin.v1alpha.UpdateConversionEventRequest}
*/
public final class UpdateConversionEventRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.analytics.admin.v1alpha.UpdateConversionEventRequest)
UpdateConversionEventRequestOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use UpdateConversionEventRequest.newBuilder() to construct.
  private UpdateConversionEventRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // No-arg constructor used only for the default instance; all fields keep
  // their proto defaults (both message fields null / unset).
  private UpdateConversionEventRequest() {}

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    // Reflection hook the protobuf runtime uses to create fresh instances.
    return new UpdateConversionEventRequest();
  }
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.analytics.admin.v1alpha.AnalyticsAdminProto
        .internal_static_google_analytics_admin_v1alpha_UpdateConversionEventRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    // Binds the generated descriptor to this class's reflective field accessors.
    return com.google.analytics.admin.v1alpha.AnalyticsAdminProto
        .internal_static_google_analytics_admin_v1alpha_UpdateConversionEventRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.analytics.admin.v1alpha.UpdateConversionEventRequest.class,
            com.google.analytics.admin.v1alpha.UpdateConversionEventRequest.Builder.class);
  }
private int bitField0_;
public static final int CONVERSION_EVENT_FIELD_NUMBER = 1;
private com.google.analytics.admin.v1alpha.ConversionEvent conversionEvent_;
/**
*
*
* <pre>
* Required. The conversion event to update.
* The `name` field is used to identify the settings to be updated.
* </pre>
*
* <code>
* .google.analytics.admin.v1alpha.ConversionEvent conversion_event = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the conversionEvent field is set.
*/
@java.lang.Override
public boolean hasConversionEvent() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The conversion event to update.
* The `name` field is used to identify the settings to be updated.
* </pre>
*
* <code>
* .google.analytics.admin.v1alpha.ConversionEvent conversion_event = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The conversionEvent.
*/
@java.lang.Override
public com.google.analytics.admin.v1alpha.ConversionEvent getConversionEvent() {
return conversionEvent_ == null
? com.google.analytics.admin.v1alpha.ConversionEvent.getDefaultInstance()
: conversionEvent_;
}
/**
*
*
* <pre>
* Required. The conversion event to update.
* The `name` field is used to identify the settings to be updated.
* </pre>
*
* <code>
* .google.analytics.admin.v1alpha.ConversionEvent conversion_event = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.analytics.admin.v1alpha.ConversionEventOrBuilder getConversionEventOrBuilder() {
return conversionEvent_ == null
? com.google.analytics.admin.v1alpha.ConversionEvent.getDefaultInstance()
: conversionEvent_;
}
public static final int UPDATE_MASK_FIELD_NUMBER = 2;
private com.google.protobuf.FieldMask updateMask_;
/**
*
*
* <pre>
* Required. The list of fields to be updated. Field names must be in snake
* case (e.g., "field_to_update"). Omitted fields will not be updated. To
* replace the entire entity, use one path with the string "*" to match all
* fields.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the updateMask field is set.
*/
@java.lang.Override
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. The list of fields to be updated. Field names must be in snake
* case (e.g., "field_to_update"). Omitted fields will not be updated. To
* replace the entire entity, use one path with the string "*" to match all
* fields.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The updateMask.
*/
@java.lang.Override
public com.google.protobuf.FieldMask getUpdateMask() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
/**
*
*
* <pre>
* Required. The list of fields to be updated. Field names must be in snake
* case (e.g., "field_to_update"). Omitted fields will not be updated. To
* replace the entire entity, use one path with the string "*" to match all
* fields.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Fields are emitted in field-number order, and only when their presence
    // bit is set; unknown fields retained from parsing are written last.
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(1, getConversionEvent());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      output.writeMessage(2, getUpdateMask());
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    // -1 means "not yet computed"; any other value is the cached size.
    if (size != -1) return size;
    size = 0;
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getConversionEvent());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.analytics.admin.v1alpha.UpdateConversionEventRequest)) {
      return super.equals(obj);
    }
    com.google.analytics.admin.v1alpha.UpdateConversionEventRequest other =
        (com.google.analytics.admin.v1alpha.UpdateConversionEventRequest) obj;
    // Messages are equal only if each field agrees on BOTH presence and value,
    // and the retained unknown fields match.
    if (hasConversionEvent() != other.hasConversionEvent()) return false;
    if (hasConversionEvent()) {
      if (!getConversionEvent().equals(other.getConversionEvent())) return false;
    }
    if (hasUpdateMask() != other.hasUpdateMask()) return false;
    if (hasUpdateMask()) {
      if (!getUpdateMask().equals(other.getUpdateMask())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    // 0 doubles as the "not yet cached" sentinel for the memoized hash.
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    // Only set fields contribute, each keyed by its field number.
    if (hasConversionEvent()) {
      hash = (37 * hash) + CONVERSION_EVENT_FIELD_NUMBER;
      hash = (53 * hash) + getConversionEvent().hashCode();
    }
    if (hasUpdateMask()) {
      hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
      hash = (53 * hash) + getUpdateMask().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Static parse helpers: all overloads delegate to PARSER (or the
  // GeneratedMessageV3 stream helpers), differing only in input type.
  public static com.google.analytics.admin.v1alpha.UpdateConversionEventRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.analytics.admin.v1alpha.UpdateConversionEventRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.analytics.admin.v1alpha.UpdateConversionEventRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.analytics.admin.v1alpha.UpdateConversionEventRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.analytics.admin.v1alpha.UpdateConversionEventRequest parseFrom(
      byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.analytics.admin.v1alpha.UpdateConversionEventRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.analytics.admin.v1alpha.UpdateConversionEventRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.analytics.admin.v1alpha.UpdateConversionEventRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  // "Delimited" variants read a varint length prefix before the message bytes.
  public static com.google.analytics.admin.v1alpha.UpdateConversionEventRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.analytics.admin.v1alpha.UpdateConversionEventRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.analytics.admin.v1alpha.UpdateConversionEventRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.analytics.admin.v1alpha.UpdateConversionEventRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  // Creates a builder pre-populated with the given prototype's set fields.
  public static Builder newBuilder(
      com.google.analytics.admin.v1alpha.UpdateConversionEventRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    // The default instance yields an empty builder; any other instance seeds
    // the builder with its own field values.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Request message for UpdateConversionEvent RPC
* </pre>
*
* Protobuf type {@code google.analytics.admin.v1alpha.UpdateConversionEventRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.analytics.admin.v1alpha.UpdateConversionEventRequest)
com.google.analytics.admin.v1alpha.UpdateConversionEventRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.analytics.admin.v1alpha.AnalyticsAdminProto
          .internal_static_google_analytics_admin_v1alpha_UpdateConversionEventRequest_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      // Same descriptor/accessor binding as the enclosing message class.
      return com.google.analytics.admin.v1alpha.AnalyticsAdminProto
          .internal_static_google_analytics_admin_v1alpha_UpdateConversionEventRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.analytics.admin.v1alpha.UpdateConversionEventRequest.class,
              com.google.analytics.admin.v1alpha.UpdateConversionEventRequest.Builder.class);
    }
    // Construct using com.google.analytics.admin.v1alpha.UpdateConversionEventRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    private void maybeForceBuilderInitialization() {
      // Eagerly create the nested field builders only when the runtime flag
      // alwaysUseFieldBuilders requests it; otherwise they are created lazily.
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getConversionEventFieldBuilder();
        getUpdateMaskFieldBuilder();
      }
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      // Reset presence bits, both message fields, and dispose any nested
      // single-field builders so they detach from this parent.
      bitField0_ = 0;
      conversionEvent_ = null;
      if (conversionEventBuilder_ != null) {
        conversionEventBuilder_.dispose();
        conversionEventBuilder_ = null;
      }
      updateMask_ = null;
      if (updateMaskBuilder_ != null) {
        updateMaskBuilder_.dispose();
        updateMaskBuilder_ = null;
      }
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.analytics.admin.v1alpha.AnalyticsAdminProto
          .internal_static_google_analytics_admin_v1alpha_UpdateConversionEventRequest_descriptor;
    }

    @java.lang.Override
    public com.google.analytics.admin.v1alpha.UpdateConversionEventRequest
        getDefaultInstanceForType() {
      return com.google.analytics.admin.v1alpha.UpdateConversionEventRequest.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.analytics.admin.v1alpha.UpdateConversionEventRequest build() {
      com.google.analytics.admin.v1alpha.UpdateConversionEventRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.analytics.admin.v1alpha.UpdateConversionEventRequest buildPartial() {
      com.google.analytics.admin.v1alpha.UpdateConversionEventRequest result =
          new com.google.analytics.admin.v1alpha.UpdateConversionEventRequest(this);
      // Skip field copying entirely when nothing was ever set.
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies each explicitly-set field from the builder into the result,
    // translating the builder's presence bits into the message's bitField0_.
    private void buildPartial0(
        com.google.analytics.admin.v1alpha.UpdateConversionEventRequest result) {
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.conversionEvent_ =
            conversionEventBuilder_ == null ? conversionEvent_ : conversionEventBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
        to_bitField0_ |= 0x00000002;
      }
      result.bitField0_ |= to_bitField0_;
    }
    // The overrides below delegate to GeneratedMessageV3.Builder unchanged;
    // they exist so the generated class exposes the reflective mutators.
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      // Typed fast path; otherwise fall back to the reflective field-by-field merge.
      if (other instanceof com.google.analytics.admin.v1alpha.UpdateConversionEventRequest) {
        return mergeFrom((com.google.analytics.admin.v1alpha.UpdateConversionEventRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Merges only the fields that are set on `other`; merging the default
    // instance is a no-op.
    public Builder mergeFrom(
        com.google.analytics.admin.v1alpha.UpdateConversionEventRequest other) {
      if (other
          == com.google.analytics.admin.v1alpha.UpdateConversionEventRequest.getDefaultInstance())
        return this;
      if (other.hasConversionEvent()) {
        mergeConversionEvent(other.getConversionEvent());
      }
      if (other.hasUpdateMask()) {
        mergeUpdateMask(other.getUpdateMask());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          // Each tag packs (field_number << 3) | wire_type; 0 means end of stream.
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                // Field 1 (conversion_event), wire type 2 (length-delimited).
                input.readMessage(getConversionEventFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                // Field 2 (update_mask), wire type 2 (length-delimited).
                input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        // Notify parents even on failure so partial state is observed.
        onChanged();
      } // finally
      return this;
    }
private int bitField0_;
private com.google.analytics.admin.v1alpha.ConversionEvent conversionEvent_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.analytics.admin.v1alpha.ConversionEvent,
com.google.analytics.admin.v1alpha.ConversionEvent.Builder,
com.google.analytics.admin.v1alpha.ConversionEventOrBuilder>
conversionEventBuilder_;
/**
*
*
* <pre>
* Required. The conversion event to update.
* The `name` field is used to identify the settings to be updated.
* </pre>
*
* <code>
* .google.analytics.admin.v1alpha.ConversionEvent conversion_event = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the conversionEvent field is set.
*/
public boolean hasConversionEvent() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The conversion event to update.
* The `name` field is used to identify the settings to be updated.
* </pre>
*
* <code>
* .google.analytics.admin.v1alpha.ConversionEvent conversion_event = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The conversionEvent.
*/
public com.google.analytics.admin.v1alpha.ConversionEvent getConversionEvent() {
if (conversionEventBuilder_ == null) {
return conversionEvent_ == null
? com.google.analytics.admin.v1alpha.ConversionEvent.getDefaultInstance()
: conversionEvent_;
} else {
return conversionEventBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. The conversion event to update.
* The `name` field is used to identify the settings to be updated.
* </pre>
*
* <code>
* .google.analytics.admin.v1alpha.ConversionEvent conversion_event = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setConversionEvent(com.google.analytics.admin.v1alpha.ConversionEvent value) {
if (conversionEventBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
conversionEvent_ = value;
} else {
conversionEventBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The conversion event to update.
* The `name` field is used to identify the settings to be updated.
* </pre>
*
* <code>
* .google.analytics.admin.v1alpha.ConversionEvent conversion_event = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setConversionEvent(
com.google.analytics.admin.v1alpha.ConversionEvent.Builder builderForValue) {
if (conversionEventBuilder_ == null) {
conversionEvent_ = builderForValue.build();
} else {
conversionEventBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The conversion event to update.
* The `name` field is used to identify the settings to be updated.
* </pre>
*
* <code>
* .google.analytics.admin.v1alpha.ConversionEvent conversion_event = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeConversionEvent(com.google.analytics.admin.v1alpha.ConversionEvent value) {
if (conversionEventBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)
&& conversionEvent_ != null
&& conversionEvent_
!= com.google.analytics.admin.v1alpha.ConversionEvent.getDefaultInstance()) {
getConversionEventBuilder().mergeFrom(value);
} else {
conversionEvent_ = value;
}
} else {
conversionEventBuilder_.mergeFrom(value);
}
if (conversionEvent_ != null) {
bitField0_ |= 0x00000001;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. The conversion event to update.
* The `name` field is used to identify the settings to be updated.
* </pre>
*
* <code>
* .google.analytics.admin.v1alpha.ConversionEvent conversion_event = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearConversionEvent() {
bitField0_ = (bitField0_ & ~0x00000001);
conversionEvent_ = null;
if (conversionEventBuilder_ != null) {
conversionEventBuilder_.dispose();
conversionEventBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The conversion event to update.
* The `name` field is used to identify the settings to be updated.
* </pre>
*
* <code>
* .google.analytics.admin.v1alpha.ConversionEvent conversion_event = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.analytics.admin.v1alpha.ConversionEvent.Builder getConversionEventBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getConversionEventFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. The conversion event to update.
* The `name` field is used to identify the settings to be updated.
* </pre>
*
* <code>
* .google.analytics.admin.v1alpha.ConversionEvent conversion_event = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.analytics.admin.v1alpha.ConversionEventOrBuilder
getConversionEventOrBuilder() {
if (conversionEventBuilder_ != null) {
return conversionEventBuilder_.getMessageOrBuilder();
} else {
return conversionEvent_ == null
? com.google.analytics.admin.v1alpha.ConversionEvent.getDefaultInstance()
: conversionEvent_;
}
}
/**
*
*
* <pre>
* Required. The conversion event to update.
* The `name` field is used to identify the settings to be updated.
* </pre>
*
* <code>
* .google.analytics.admin.v1alpha.ConversionEvent conversion_event = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.analytics.admin.v1alpha.ConversionEvent,
com.google.analytics.admin.v1alpha.ConversionEvent.Builder,
com.google.analytics.admin.v1alpha.ConversionEventOrBuilder>
getConversionEventFieldBuilder() {
if (conversionEventBuilder_ == null) {
conversionEventBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.analytics.admin.v1alpha.ConversionEvent,
com.google.analytics.admin.v1alpha.ConversionEvent.Builder,
com.google.analytics.admin.v1alpha.ConversionEventOrBuilder>(
getConversionEvent(), getParentForChildren(), isClean());
conversionEvent_ = null;
}
return conversionEventBuilder_;
}
private com.google.protobuf.FieldMask updateMask_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
updateMaskBuilder_;
/**
*
*
* <pre>
* Required. The list of fields to be updated. Field names must be in snake
* case (e.g., "field_to_update"). Omitted fields will not be updated. To
* replace the entire entity, use one path with the string "*" to match all
* fields.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the updateMask field is set.
*/
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. The list of fields to be updated. Field names must be in snake
* case (e.g., "field_to_update"). Omitted fields will not be updated. To
* replace the entire entity, use one path with the string "*" to match all
* fields.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The updateMask.
*/
// Returns the current update_mask; never null. Prefers the nested builder's view
// when one exists, otherwise falls back to the FieldMask default instance.
public com.google.protobuf.FieldMask getUpdateMask() {
  if (updateMaskBuilder_ == null) {
    return updateMask_ == null
        ? com.google.protobuf.FieldMask.getDefaultInstance()
        : updateMask_;
  } else {
    return updateMaskBuilder_.getMessage();
  }
}
/**
*
*
* <pre>
* Required. The list of fields to be updated. Field names must be in snake
* case (e.g., "field_to_update"). Omitted fields will not be updated. To
* replace the entire entity, use one path with the string "*" to match all
* fields.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
// Replaces update_mask with the given (non-null) message, routing through the
// nested builder when one is active, then marks the presence bit.
public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
  if (updateMaskBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    updateMask_ = value;
  } else {
    updateMaskBuilder_.setMessage(value);
  }
  bitField0_ |= 0x00000002; // record field presence
  onChanged();
  return this;
}
/**
*
*
* <pre>
* Required. The list of fields to be updated. Field names must be in snake
* case (e.g., "field_to_update"). Omitted fields will not be updated. To
* replace the entire entity, use one path with the string "*" to match all
* fields.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
// Builder-typed overload of setUpdateMask: materializes the sub-builder and stores
// the resulting message, then marks the presence bit.
public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
  if (updateMaskBuilder_ == null) {
    updateMask_ = builderForValue.build();
  } else {
    updateMaskBuilder_.setMessage(builderForValue.build());
  }
  bitField0_ |= 0x00000002; // record field presence
  onChanged();
  return this;
}
/**
*
*
* <pre>
* Required. The list of fields to be updated. Field names must be in snake
* case (e.g., "field_to_update"). Omitted fields will not be updated. To
* replace the entire entity, use one path with the string "*" to match all
* fields.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
// Proto merge semantics for update_mask: if the field is already present and holds
// a non-default message, field-merge the incoming value into it; otherwise a plain
// replace is equivalent. With an active sub-builder, delegate the merge to it.
public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
  if (updateMaskBuilder_ == null) {
    if (((bitField0_ & 0x00000002) != 0)
        && updateMask_ != null
        && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
      getUpdateMaskBuilder().mergeFrom(value);
    } else {
      updateMask_ = value;
    }
  } else {
    updateMaskBuilder_.mergeFrom(value);
  }
  // Set the presence bit only when the plain field ended up non-null; on the
  // sub-builder path updateMask_ has been nulled out by getUpdateMaskFieldBuilder().
  if (updateMask_ != null) {
    bitField0_ |= 0x00000002;
    onChanged();
  }
  return this;
}
/**
*
*
* <pre>
* Required. The list of fields to be updated. Field names must be in snake
* case (e.g., "field_to_update"). Omitted fields will not be updated. To
* replace the entire entity, use one path with the string "*" to match all
* fields.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
// Clears update_mask: drops the presence bit, the plain field, and any live
// sub-builder (disposed so parent/child invalidation stops).
public Builder clearUpdateMask() {
  bitField0_ = (bitField0_ & ~0x00000002);
  updateMask_ = null;
  if (updateMaskBuilder_ != null) {
    updateMaskBuilder_.dispose();
    updateMaskBuilder_ = null;
  }
  onChanged();
  return this;
}
/**
*
*
* <pre>
* Required. The list of fields to be updated. Field names must be in snake
* case (e.g., "field_to_update"). Omitted fields will not be updated. To
* replace the entire entity, use one path with the string "*" to match all
* fields.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
// Returns a mutable sub-builder for update_mask; calling this marks the field as
// present since the caller is assumed to be about to mutate it.
public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
  bitField0_ |= 0x00000002;
  onChanged();
  return getUpdateMaskFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. The list of fields to be updated. Field names must be in snake
* case (e.g., "field_to_update"). Omitted fields will not be updated. To
* replace the entire entity, use one path with the string "*" to match all
* fields.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
// Read-only view of update_mask without forcing sub-builder creation; never null.
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
  if (updateMaskBuilder_ != null) {
    return updateMaskBuilder_.getMessageOrBuilder();
  } else {
    return updateMask_ == null
        ? com.google.protobuf.FieldMask.getDefaultInstance()
        : updateMask_;
  }
}
/**
*
*
* <pre>
* Required. The list of fields to be updated. Field names must be in snake
* case (e.g., "field_to_update"). Omitted fields will not be updated. To
* replace the entire entity, use one path with the string "*" to match all
* fields.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
// Lazily creates the nested builder for update_mask; after creation the builder
// owns the value and the plain field reference is cleared.
private com.google.protobuf.SingleFieldBuilderV3<
        com.google.protobuf.FieldMask,
        com.google.protobuf.FieldMask.Builder,
        com.google.protobuf.FieldMaskOrBuilder>
    getUpdateMaskFieldBuilder() {
  if (updateMaskBuilder_ == null) {
    updateMaskBuilder_ =
        new com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>(
            getUpdateMask(), getParentForChildren(), isClean());
    updateMask_ = null; // field now managed exclusively by the builder
  }
  return updateMaskBuilder_;
}
// Boilerplate final overrides delegating unknown-field handling to the superclass.
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.setUnknownFields(unknownFields);
}

@java.lang.Override
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.analytics.admin.v1alpha.UpdateConversionEventRequest)
}
// @@protoc_insertion_point(class_scope:google.analytics.admin.v1alpha.UpdateConversionEventRequest)
// Singleton default instance, created eagerly at class-initialization time.
private static final com.google.analytics.admin.v1alpha.UpdateConversionEventRequest
    DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.analytics.admin.v1alpha.UpdateConversionEventRequest();
}

/** Returns the immutable default (all-fields-unset) instance of this message. */
public static com.google.analytics.admin.v1alpha.UpdateConversionEventRequest
    getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Shared stateless parser. parsePartialFrom attaches the partially-built message
// to any parse failure so callers can inspect what was decoded before the error.
private static final com.google.protobuf.Parser<UpdateConversionEventRequest> PARSER =
    new com.google.protobuf.AbstractParser<UpdateConversionEventRequest>() {
      @java.lang.Override
      public UpdateConversionEventRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          // Plain I/O failures are re-wrapped in the protobuf-specific exception type.
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
/** Static accessor for the shared parser. */
public static com.google.protobuf.Parser<UpdateConversionEventRequest> parser() {
  return PARSER;
}

// Instance-level accessor required by the Message interface; same shared parser.
@java.lang.Override
public com.google.protobuf.Parser<UpdateConversionEventRequest> getParserForType() {
  return PARSER;
}

// Instance-level accessor required by the Message interface; same singleton default.
@java.lang.Override
public com.google.analytics.admin.v1alpha.UpdateConversionEventRequest
    getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
|
apache/hudi | 37,713 | hudi-flink-datasource/hudi-flink/src/test/java/org/apache/hudi/table/ITTestSchemaEvolution.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hudi.table;
import org.apache.hudi.avro.model.HoodieCompactionPlan;
import org.apache.hudi.client.HoodieFlinkWriteClient;
import org.apache.hudi.common.config.HoodieCommonConfig;
import org.apache.hudi.common.config.HoodieMetadataConfig;
import org.apache.hudi.common.table.HoodieTableConfig;
import org.apache.hudi.common.table.timeline.HoodieInstant;
import org.apache.hudi.common.util.CompactionUtils;
import org.apache.hudi.common.util.Option;
import org.apache.hudi.common.util.StringUtils;
import org.apache.hudi.common.util.collection.Pair;
import org.apache.hudi.config.HoodieWriteConfig;
import org.apache.hudi.configuration.FlinkOptions;
import org.apache.hudi.exception.HoodieException;
import org.apache.hudi.internal.schema.Types;
import org.apache.hudi.keygen.constant.KeyGeneratorOptions;
import org.apache.hudi.sink.compact.CompactOperator;
import org.apache.hudi.sink.compact.CompactionCommitEvent;
import org.apache.hudi.sink.compact.CompactionCommitSink;
import org.apache.hudi.sink.compact.CompactionPlanSourceFunction;
import org.apache.hudi.util.AvroSchemaConverter;
import org.apache.hudi.util.FlinkWriteClients;
import org.apache.hudi.utils.FlinkMiniCluster;
import org.apache.avro.Schema;
import org.apache.avro.SchemaBuilder;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.TableResult;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.table.factories.FactoryUtil;
import org.apache.flink.types.Row;
import org.apache.flink.util.CloseableIterator;
import org.apache.flink.util.Preconditions;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.junit.jupiter.api.io.TempDir;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.stream.Collectors;
import static org.apache.hudi.common.testutils.HoodieTestUtils.INSTANT_GENERATOR;
import static org.apache.hudi.internal.schema.action.TableChange.ColumnPositionChange.ColumnPositionType.AFTER;
import static org.apache.hudi.internal.schema.action.TableChange.ColumnPositionChange.ColumnPositionType.BEFORE;
import static org.apache.hudi.utils.TestConfigurations.ROW_TYPE_EVOLUTION_AFTER;
import static org.apache.hudi.utils.TestConfigurations.ROW_TYPE_EVOLUTION_BEFORE;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;
@SuppressWarnings({"SqlDialectInspection", "SqlNoDataSourceInspection"})
@ExtendWith(FlinkMiniCluster.class)
public class ITTestSchemaEvolution {
private static final Logger LOG = LoggerFactory.getLogger(ITTestSchemaEvolution.class);
@TempDir File tempFile;
private StreamTableEnvironment tEnv;
private StreamExecutionEnvironment env;
/** Builds a fresh single-parallelism streaming environment and table environment per test. */
@BeforeEach
public void setUp() {
  StreamExecutionEnvironment execEnv = StreamExecutionEnvironment.getExecutionEnvironment();
  execEnv.setParallelism(1); // deterministic single-task execution for answer checks
  env = execEnv;
  tEnv = StreamTableEnvironment.create(env);
}
/** Schema evolution through the copy-on-write input format (default table options). */
@Test
public void testCopyOnWriteInputFormat() throws Exception {
  TableOptions tableOptions = defaultTableOptions(tempFile.getAbsolutePath());
  testSchemaEvolution(tableOptions);
}
/** Streaming read from the earliest commit exercising the base-file-only iterator. */
@Test
public void testMergeOnReadInputFormatBaseFileOnlyIterator() throws Exception {
  TableOptions tableOptions =
      defaultTableOptions(tempFile.getAbsolutePath())
          .withOption(FlinkOptions.READ_STREAMING_SKIP_COMPACT.key(), false)
          .withOption(FlinkOptions.READ_AS_STREAMING.key(), true)
          .withOption(FlinkOptions.READ_START_COMMIT.key(), FlinkOptions.START_COMMIT_EARLIEST);
  testSchemaEvolution(tableOptions);
}
/** Streaming read starting at commit 1 exercising the base-file-only filtering iterator. */
@Test
public void testMergeOnReadInputFormatBaseFileOnlyFilteringIterator() throws Exception {
  TableOptions tableOptions =
      defaultTableOptions(tempFile.getAbsolutePath())
          .withOption(FlinkOptions.READ_STREAMING_SKIP_COMPACT.key(), false)
          .withOption(FlinkOptions.READ_AS_STREAMING.key(), true)
          .withOption(FlinkOptions.READ_START_COMMIT.key(), 1);
  testSchemaEvolution(tableOptions);
}
/** MOR table without forced compaction: reads flow through the log-file-only iterator. */
@Test
public void testMergeOnReadInputFormatLogFileOnlyIteratorGetLogFileIterator() throws Exception {
  testSchemaEvolution(
      defaultTableOptions(tempFile.getAbsolutePath())
          .withOption(FlinkOptions.TABLE_TYPE.key(), FlinkOptions.TABLE_TYPE_MERGE_ON_READ));
}
/** Changelog streaming read on a MOR table: unmerged log records are expected. */
@Test
public void testMergeOnReadInputFormatLogFileOnlyIteratorGetUnMergedLogFileIterator() throws Exception {
  TableOptions tableOptions =
      defaultTableOptions(tempFile.getAbsolutePath())
          .withOption(FlinkOptions.CHANGELOG_ENABLED.key(), true)
          .withOption(FlinkOptions.TABLE_TYPE.key(), FlinkOptions.TABLE_TYPE_MERGE_ON_READ)
          .withOption(FlinkOptions.READ_AS_STREAMING.key(), true)
          .withOption(FlinkOptions.READ_START_COMMIT.key(), FlinkOptions.START_COMMIT_EARLIEST);
  testSchemaEvolution(tableOptions, false, EXPECTED_UNMERGED_RESULT);
}
/** Compaction after every delta commit forces the merge iterator read path. */
@Test
public void testMergeOnReadInputFormatMergeIterator() throws Exception {
  testSchemaEvolution(
      defaultTableOptions(tempFile.getAbsolutePath())
          .withOption(FlinkOptions.TABLE_TYPE.key(), FlinkOptions.TABLE_TYPE_MERGE_ON_READ)
          .withOption(FlinkOptions.COMPACTION_DELTA_COMMITS.key(), 1),
      true);
}
/** Skip-merge read on a compacting MOR table: unmerged rows are expected. */
@Test
public void testMergeOnReadInputFormatSkipMergeIterator() throws Exception {
  TableOptions tableOptions =
      defaultTableOptions(tempFile.getAbsolutePath())
          .withOption(FlinkOptions.MERGE_TYPE.key(), FlinkOptions.REALTIME_SKIP_MERGE)
          .withOption(FlinkOptions.TABLE_TYPE.key(), FlinkOptions.TABLE_TYPE_MERGE_ON_READ)
          .withOption(FlinkOptions.COMPACTION_DELTA_COMMITS.key(), 1);
  testSchemaEvolution(tableOptions, true, EXPECTED_UNMERGED_RESULT);
}
/**
 * Runs the full schema-evolution flow on a MOR table, then compacts once more and
 * verifies the compacted base files still produce the evolved merged result.
 */
@Test
public void testCompaction() throws Exception {
  TableOptions tableOptions = defaultTableOptions(tempFile.getAbsolutePath())
      .withOption(FlinkOptions.TABLE_TYPE.key(), FlinkOptions.TABLE_TYPE_MERGE_ON_READ)
      .withOption(FlinkOptions.COMPACTION_DELTA_COMMITS.key(), 1);
  testSchemaEvolution(tableOptions);
  // compaction must not lose or corrupt evolved columns
  doCompact(tableOptions.toConfig());
  checkAnswerEvolved(EXPECTED_MERGED_RESULT.evolvedRows);
}
// Convenience overload: no compaction before schema change, merged result expected.
private void testSchemaEvolution(TableOptions tableOptions) throws Exception {
  testSchemaEvolution(tableOptions, false);
}

// Convenience overload: merged result expected.
private void testSchemaEvolution(TableOptions tableOptions, boolean shouldCompact) throws Exception {
  testSchemaEvolution(tableOptions, shouldCompact, EXPECTED_MERGED_RESULT);
}
/**
 * End-to-end schema-evolution scenario: write with the original schema, evolve the
 * table schema (optionally compacting first), write with the evolved schema, then
 * verify evolved rows, row count, and rows including Hoodie metadata columns.
 * The step order is load-bearing; do not reorder.
 */
private void testSchemaEvolution(TableOptions tableOptions, boolean shouldCompact, ExpectedResult expectedResult) throws Exception {
  writeTableWithSchema1(tableOptions);
  changeTableSchema(tableOptions, shouldCompact);
  writeTableWithSchema2(tableOptions);
  checkAnswerEvolved(expectedResult.evolvedRows);
  checkAnswerCount(expectedResult.rowCount);
  checkAnswerWithMeta(tableOptions, expectedResult.rowsWithMeta);
}
/**
 * Creates table {@code t1} with the pre-evolution schema and inserts nine rows
 * (ids 0-8) across five partitions, including null struct/map values for id0 to
 * exercise null handling of complex types after evolution.
 */
private void writeTableWithSchema1(TableOptions tableOptions) throws ExecutionException, InterruptedException {
  //language=SQL
  tEnv.executeSql(""
      + "create table t1 ("
      + "  uuid string,"
      + "  name string,"
      + "  gender char,"
      + "  age int,"
      + "  ts timestamp,"
      + "  f_struct row<f0 int, f1 string, drop_add string, change_type int>,"
      + "  f_map map<string, int>,"
      + "  f_array array<int>,"
      + "  f_row_map map<string, row<f0 int, f1 string, drop_add string, change_type int>>,"
      + "  f_row_array array<row<f0 int, f1 string, drop_add string, change_type int>>,"
      + "  `partition` string"
      + ") partitioned by (`partition`) with (" + tableOptions + ")"
  );
  // An explicit cast is performed for map-values to prevent implicit map.key strings from being truncated/extended based on the last row's inferred schema
  //language=SQL
  tEnv.executeSql(""
      + "insert into t1 select "
      + "  cast(uuid as string),"
      + "  cast(name as string),"
      + "  cast(gender as char),"
      + "  cast(age as int),"
      + "  cast(ts as timestamp),"
      + "  cast(f_struct as row<f0 int, f1 string, drop_add string, change_type int>),"
      + "  cast(f_map as map<string, int>),"
      + "  cast(f_array as array<int>),"
      + "  cast(f_row_map as map<string, row< f0 int, f1 string, drop_add string, change_type int>>),"
      + "  cast(f_row_array as array<row< f0 int, f1 string, drop_add string, change_type int>>),"
      + "  cast(`partition` as string) "
      + "from (values "
      + "  ('id0', 'Indica', 'F', 12, '2000-01-01 00:00:00', cast(null as row<f0 int, f1 string, drop_add string, change_type int>), map['Indica', 1212], array[12], "
      + "    cast(null as map<string, row<f0 int, f1 string, drop_add string, change_type int>>), array[row(0, 's0', '', 0)], 'par0'),"
      + "  ('id1', 'Danny', 'M', 23, '2000-01-01 00:00:01', row(1, 's1', '', 1), cast(map['Danny', 2323] as map<string, int>), array[23, 23], "
      + "    cast(map['Danny', row(1, 's1', '', 1)] as map<string, row<f0 int, f1 string, drop_add string, change_type int>>), array[row(1, 's1', '', 1)], 'par1'),"
      + "  ('id2', 'Stephen', 'M', 33, '2000-01-01 00:00:02', row(2, 's2', '', 2), cast(map['Stephen', 3333] as map<string, int>), array[33], "
      + "    cast(map['Stephen', row(2, 's2', '', 2)] as map<string, row<f0 int, f1 string, drop_add string, change_type int>>), array[row(2, 's2', '', 2)], 'par1'),"
      + "  ('id3', 'Julian', 'M', 53, '2000-01-01 00:00:03', row(3, 's3', '', 3), cast(map['Julian', 5353] as map<string, int>), array[53, 53], "
      + "    cast(map['Julian', row(3, 's3', '', 3)] as map<string, row<f0 int, f1 string, drop_add string, change_type int>>), array[row(3, 's3', '', 3)], 'par2'),"
      + "  ('id4', 'Fabian', 'M', 31, '2000-01-01 00:00:04', row(4, 's4', '', 4), cast(map['Fabian', 3131] as map<string, int>), array[31], "
      + "    cast(map['Fabian', row(4, 's4', '', 4)] as map<string, row<f0 int, f1 string, drop_add string, change_type int>>), array[row(4, 's4', '', 4)], 'par2'),"
      + "  ('id5', 'Sophia', 'F', 18, '2000-01-01 00:00:05', row(5, 's5', '', 5), cast(map['Sophia', 1818] as map<string, int>), array[18, 18], "
      + "    cast(map['Sophia', row(5, 's5', '', 5)] as map<string, row<f0 int, f1 string, drop_add string, change_type int>>), array[row(5, 's5', '', 5)], 'par3'),"
      + "  ('id6', 'Emma', 'F', 20, '2000-01-01 00:00:06', row(6, 's6', '', 6), cast(map['Emma', 2020] as map<string, int>), array[20], "
      + "    cast(map['Emma', row(6, 's6', '', 6)] as map<string, row<f0 int, f1 string, drop_add string, change_type int>>), array[row(6, 's6', '', 6)], 'par3'),"
      + "  ('id7', 'Bob', 'M', 44, '2000-01-01 00:00:07', row(7, 's7', '', 7), cast(map['Bob', 4444] as map<string, int>), array[44, 44], "
      + "    cast(map['Bob', row(7, 's7', '', 7)] as map<string, row<f0 int, f1 string, drop_add string, change_type int>>), array[row(7, 's7', '', 7)], 'par4'),"
      + "  ('id8', 'Han', 'M', 56, '2000-01-01 00:00:08', row(8, 's8', '', 8), cast(map['Han', 5656] as map<string, int>), array[56, 56, 56], "
      + "    cast(map['Han', row(8, 's8', '', 8)] as map<string, row<f0 int, f1 string, drop_add string, change_type int>>), array[row(8, 's8', '', 8)], 'par4')"
      + ") as A(uuid, name, gender, age, ts, f_struct, f_map, f_array, f_row_map, f_row_array, `partition`)"
  ).await();
}
/**
 * Applies a comprehensive sequence of schema-evolution operations (add, drop,
 * rename, type promotion, reorder) through the write client's evolution API,
 * optionally compacting first. NOTE: the operations are order-dependent — later
 * calls reference field names/positions produced by earlier calls.
 */
private void changeTableSchema(TableOptions tableOptions, boolean shouldCompactBeforeSchemaChanges) throws IOException {
  Configuration conf = tableOptions.toConfig();
  try (HoodieFlinkWriteClient<?> writeClient = FlinkWriteClients.createWriteClient(conf)) {
    if (shouldCompactBeforeSchemaChanges) {
      doCompact(conf);
    }
    // nullable (union-with-null) Avro schemas used for the added columns below
    Schema intType = SchemaBuilder.unionOf().nullType().and().intType().endUnion();
    Schema longType = SchemaBuilder.unionOf().nullType().and().longType().endUnion();
    Schema doubleType = SchemaBuilder.unionOf().nullType().and().doubleType().endUnion();
    Schema stringType = SchemaBuilder.unionOf().nullType().and().stringType().endUnion();
    Schema structType = SchemaBuilder.builder().record("new_row_col").fields()
        .name("f0").type(longType).noDefault()
        .name("f1").type(stringType).noDefault().endRecord();
    Schema arrayType = Schema.createUnion(SchemaBuilder.builder().array().items(stringType), SchemaBuilder.builder().nullType());
    Schema mapType = Schema.createUnion(SchemaBuilder.builder().map().values(stringType), SchemaBuilder.builder().nullType());
    // top-level column changes: add, drop, rename, type change, positional moves
    writeClient.addColumn("salary", doubleType, null, "name", AFTER);
    writeClient.deleteColumns("gender");
    writeClient.renameColumn("name", "first_name");
    writeClient.updateColumnType("age", Types.StringType.get());
    writeClient.addColumn("last_name", stringType, "empty allowed", "salary", BEFORE);
    writeClient.reOrderColPosition("age", "first_name", BEFORE);
    // add a field in the middle of the `f_struct` and `f_row_map` columns
    writeClient.addColumn("f_struct.f2", intType, "add field in middle of struct", "f_struct.f0", AFTER);
    writeClient.addColumn("f_row_map.value.f2", intType, "add field in middle of struct", "f_row_map.value.f0", AFTER);
    // add a field at the end of `f_struct` and `f_row_map` column
    writeClient.addColumn("f_struct.f3", stringType);
    writeClient.addColumn("f_row_map.value.f3", stringType);
    // delete and add a field with the same name
    // reads should not return previously inserted datum of dropped field of the same name
    writeClient.deleteColumns("f_struct.drop_add");
    writeClient.addColumn("f_struct.drop_add", doubleType);
    writeClient.deleteColumns("f_row_map.value.drop_add");
    writeClient.addColumn("f_row_map.value.drop_add", doubleType);
    // perform comprehensive evolution on complex types (struct, array, map) by promoting its primitive types
    writeClient.updateColumnType("f_struct.change_type", Types.LongType.get());
    writeClient.renameColumn("f_struct.change_type", "renamed_change_type");
    writeClient.updateColumnType("f_row_map.value.change_type", Types.LongType.get());
    writeClient.renameColumn("f_row_map.value.change_type", "renamed_change_type");
    writeClient.updateColumnType("f_array.element", Types.DoubleType.get());
    writeClient.updateColumnType("f_map.value", Types.DoubleType.get());
    // perform comprehensive schema evolution on table by adding complex typed columns
    writeClient.addColumn("new_row_col", structType);
    writeClient.addColumn("new_array_col", arrayType);
    writeClient.addColumn("new_map_col", mapType);
    writeClient.reOrderColPosition("partition", "new_map_col", AFTER);
    // perform comprehensive evolution on a struct column by reordering field positions
    writeClient.updateColumnType("f_struct.f0", Types.DecimalType.get(20, 0));
    writeClient.reOrderColPosition("f_struct.f0", "f_struct.drop_add", AFTER);
    writeClient.updateColumnType("f_row_map.value.f0", Types.DecimalType.get(20, 0));
    writeClient.reOrderColPosition("f_row_map.value.f0", "f_row_map.value.drop_add", AFTER);
  } catch (Exception e) {
    throw new HoodieException(e);
  }
}
/**
 * Re-registers table {@code t1} with the post-evolution schema and writes three
 * rows: an upsert of id1, a new id9, and an upsert of id3. The source Avro schema
 * option is switched to the evolved row type so the writer matches the new table
 * schema.
 */
private void writeTableWithSchema2(TableOptions tableOptions) throws ExecutionException, InterruptedException {
  tableOptions.withOption(
      FlinkOptions.SOURCE_AVRO_SCHEMA.key(),
      AvroSchemaConverter.convertToSchema(ROW_TYPE_EVOLUTION_AFTER, "hoodie.t1.t1_record"));
  // drop the catalog entry only; the underlying Hudi table data stays in place
  //language=SQL
  tEnv.executeSql("drop table t1");
  //language=SQL
  tEnv.executeSql(""
      + "create table t1 ("
      + "  uuid string,"
      + "  age string,"
      + "  first_name string,"
      + "  last_name string,"
      + "  salary double,"
      + "  ts timestamp,"
      + "  f_struct row<f2 int, f1 string, renamed_change_type bigint, f3 string, drop_add string, f0 decimal(20, 0)>,"
      + "  f_map map<string, double>,"
      + "  f_array array<double>,"
      + "  f_row_map map<string, row<f2 int, f1 string, renamed_change_type bigint, f3 string, drop_add string, f0 decimal(20, 0)>>,"
      + "  f_row_array array<row<f0 int, f1 string, drop_add string, change_type int>>,"
      + "  new_row_col row<f0 bigint, f1 string>,"
      + "  new_array_col array<string>,"
      + "  new_map_col map<string, string>,"
      + "  `partition` string"
      + ") partitioned by (`partition`) with (" + tableOptions + ")"
  );
  //language=SQL
  tEnv.executeSql(""
      + "insert into t1 select "
      + "  cast(uuid as string),"
      + "  cast(age as string),"
      + "  cast(first_name as string),"
      + "  cast(last_name as string),"
      + "  cast(salary as double),"
      + "  cast(ts as timestamp),"
      + "  cast(f_struct as row<f2 int, f1 string, renamed_change_type bigint, f3 string, drop_add string, f0 decimal(20, 0)>),"
      + "  cast(f_map as map<string, double>),"
      + "  cast(f_array as array<double>),"
      + "  cast(f_row_map as map<string, row<f2 int, f1 string, renamed_change_type bigint, f3 string, drop_add string, f0 decimal(20, 0)>>),"
      + "  cast(f_row_array as array<row<f0 int, f1 string, drop_add string, change_type int>>),"
      + "  cast(new_row_col as row<f0 bigint, f1 string>),"
      + "  cast(new_array_col as array<string>),"
      + "  cast(new_map_col as map<string, string>),"
      + "  cast(`partition` as string) "
      + "from (values "
      + "  ('id1', '23', 'Danny', '', 10000.1, '2000-01-01 00:00:01', row(1, 's1', 11, 't1', 'drop_add1', 1), cast(map['Danny', 2323.23] as map<string, double>), array[23, 23, 23], "
      + "    cast(map['Danny', row(1, 's1', 11, 't1', 'drop_add1', 1)] as map<string, row<f2 int, f1 string, renamed_change_type bigint, f3 string, drop_add string, f0 decimal(20, 0)>>), "
      + "    array[row(1, 's1', '', 1)], "
      + "    row(1, '1'), array['1'], Map['k1','v1'], 'par1'),"
      + "  ('id9', 'unknown', 'Alice', '', 90000.9, '2000-01-01 00:00:09', row(9, 's9', 99, 't9', 'drop_add9', 9), cast(map['Alice', 9999.99] as map<string, double>), array[9999, 9999], "
      + "    cast(map['Alice', row(9, 's9', 99, 't9', 'drop_add9', 9)] as map<string, row<f2 int, f1 string, renamed_change_type bigint, f3 string, drop_add string, f0 decimal(20, 0)>>), "
      + "    array[row(9, 's9', '', 9)], "
      + "    row(9, '9'), array['9'], Map['k9','v9'], 'par1'),"
      + "  ('id3', '53', 'Julian', '', 30000.3, '2000-01-01 00:00:03', row(3, 's3', 33, 't3', 'drop_add3', 3), cast(map['Julian', 5353.53] as map<string, double>), array[53], "
      + "    cast(map['Julian', row(3, 's3', 33, 't3', 'drop_add3', 3)] as map<string, row<f2 int, f1 string, renamed_change_type bigint, f3 string, drop_add string, f0 decimal(20, 0)>>), "
      + "    array[row(3, 's3', '', 3)], "
      + "    row(3, '3'), array['3'], Map['k3','v3'], 'par2')"
      + ") as A(uuid, age, first_name, last_name, salary, ts, f_struct, f_map, f_array, f_row_map, f_row_array, new_row_col, new_array_col, new_map_col, `partition`)"
  ).await();
}
/**
 * Baseline table options: COW table named t1, snapshot query, single-task
 * parallelism everywhere, schema evolution enabled, and column-stats metadata
 * index on. Individual tests override/extend via {@code withOption}.
 */
private TableOptions defaultTableOptions(String tablePath) {
  return new TableOptions(
      FactoryUtil.CONNECTOR.key(), HoodieTableFactory.FACTORY_ID,
      FlinkOptions.PATH.key(), tablePath,
      FlinkOptions.TABLE_TYPE.key(), FlinkOptions.TABLE_TYPE_COPY_ON_WRITE,
      HoodieTableConfig.NAME.key(), "t1",
      FlinkOptions.READ_AS_STREAMING.key(), false,
      FlinkOptions.QUERY_TYPE.key(), FlinkOptions.QUERY_TYPE_SNAPSHOT,
      KeyGeneratorOptions.RECORDKEY_FIELD_NAME.key(), "uuid",
      KeyGeneratorOptions.PARTITIONPATH_FIELD_NAME.key(), "partition",
      KeyGeneratorOptions.HIVE_STYLE_PARTITIONING_ENABLE.key(), true,
      FlinkOptions.WRITE_BATCH_SIZE.key(), 0.000001, // each record triggers flush
      FlinkOptions.SOURCE_AVRO_SCHEMA.key(), AvroSchemaConverter.convertToSchema(ROW_TYPE_EVOLUTION_BEFORE),
      FlinkOptions.READ_TASKS.key(), 1,
      FlinkOptions.WRITE_TASKS.key(), 1,
      FlinkOptions.INDEX_BOOTSTRAP_TASKS.key(), 1,
      FlinkOptions.BUCKET_ASSIGN_TASKS.key(), 1,
      FlinkOptions.COMPACTION_TASKS.key(), 1,
      FlinkOptions.COMPACTION_SCHEDULE_ENABLED.key(), false,
      HoodieWriteConfig.EMBEDDED_TIMELINE_SERVER_REUSE_ENABLED.key(), false,
      HoodieCommonConfig.SCHEMA_EVOLUTION_ENABLE.key(), true,
      HoodieMetadataConfig.ENABLE_METADATA_INDEX_COLUMN_STATS.key(), "true",
      HoodieMetadataConfig.ENABLE_METADATA_INDEX_PARTITION_STATS.key(), "false");
}
/** Verifies the evolved (renamed/added/retyped) columns of t1 against the expected rows. */
private void checkAnswerEvolved(String... expectedResult) throws Exception {
  //language=SQL
  checkAnswer(""
      + "select "
      + "  first_name, "
      + "  salary, "
      + "  age, "
      + "  f_struct, "
      + "  f_map, "
      + "  f_array, "
      + "  f_row_map, "
      + "  f_row_array, "
      + "  new_row_col, "
      + "  new_array_col, "
      + "  new_map_col "
      + "from t1", expectedResult);
}
/** Verifies the total row count of t1 (expected as a formatted count row string). */
private void checkAnswerCount(String... expectedResult) throws Exception {
  //language=SQL
  checkAnswer("select count(*) from t1", expectedResult);
}
/**
 * Re-registers t1 with Hoodie metadata columns prepended to the evolved schema and
 * verifies a query that includes {@code _hoodie_record_key} alongside evolved columns.
 */
private void checkAnswerWithMeta(TableOptions tableOptions, String... expectedResult) throws Exception {
  // drop the catalog entry only; the underlying Hudi table data stays in place
  //language=SQL
  tEnv.executeSql("drop table t1");
  //language=SQL
  tEnv.executeSql(""
      + "create table t1 ("
      + "  `_hoodie_commit_time` string,"
      + "  `_hoodie_commit_seqno` string,"
      + "  `_hoodie_record_key` string,"
      + "  `_hoodie_partition_path` string,"
      + "  `_hoodie_file_name` string,"
      + "  uuid string,"
      + "  age string,"
      + "  first_name string,"
      + "  last_name string,"
      + "  salary double,"
      + "  ts timestamp,"
      + "  f_struct row<f2 int, f1 string, renamed_change_type bigint, f3 string, drop_add string, f0 decimal(20, 0)>,"
      + "  f_map map<string, double>,"
      + "  f_array array<double>,"
      + "  f_row_map map<string, row<f2 int, f1 string, renamed_change_type bigint, f3 string, drop_add string, f0 decimal(20, 0)>>,"
      + "  f_row_array array<row<f0 int, f1 string, drop_add string, change_type int>>,"
      + "  new_row_col row<f0 bigint, f1 string>,"
      + "  new_array_col array<string>,"
      + "  new_map_col map<string, string>,"
      + "  `partition` string"
      + ") partitioned by (`partition`) with (" + tableOptions + ")"
  );
  //language=SQL
  checkAnswer(""
      + "select "
      + "  `_hoodie_record_key`, "
      + "  first_name, "
      + "  salary, "
      + "  age, "
      + "  f_struct, "
      + "  f_map, "
      + "  f_array, "
      + "  f_row_map, "
      + "  f_row_array, "
      + "  new_row_col, "
      + "  new_array_col, "
      + "  new_map_col "
      + "from t1", expectedResult);
}
/**
 * Schedules a compaction, runs its plan through a small Flink pipeline
 * (plan source -> compact operator -> commit sink), and asserts the compaction
 * instant is completed on the reloaded timeline.
 *
 * @param conf write configuration for the table under test; async compaction is
 *     forced off so the method returns only after compaction has finished
 */
private void doCompact(Configuration conf) throws Exception {
  // use sync compaction to ensure compaction finished.
  conf.set(FlinkOptions.COMPACTION_ASYNC_ENABLED, false);
  // bounded wildcard instead of the raw type, consistent with changeTableSchema(...)
  try (HoodieFlinkWriteClient<?> writeClient = FlinkWriteClients.createWriteClient(conf)) {
    HoodieFlinkTable<?> table = writeClient.getHoodieTable();
    Option<String> compactionInstantOpt = writeClient.scheduleCompaction(Option.empty());
    String compactionInstantTime = compactionInstantOpt.get();
    // generate compaction plan
    // should support configurable commit metadata
    HoodieCompactionPlan compactionPlan = CompactionUtils.getCompactionPlan(
        table.getMetaClient(), compactionInstantTime);
    HoodieInstant instant = INSTANT_GENERATOR.getCompactionRequestedInstant(compactionInstantTime);
    // Mark instant as compaction inflight
    table.getActiveTimeline().transitionCompactionRequestedToInflight(instant);
    env.addSource(new CompactionPlanSourceFunction(Collections.singletonList(Pair.of(compactionInstantTime, compactionPlan)), conf))
        .name("compaction_source")
        .uid("uid_compaction_source")
        .rebalance()
        .transform("compact_task",
            TypeInformation.of(CompactionCommitEvent.class),
            new CompactOperator(conf))
        .setParallelism(FlinkMiniCluster.DEFAULT_PARALLELISM)
        .addSink(new CompactionCommitSink(conf))
        .name("compaction_commit")
        .uid("uid_compaction_commit")
        .setParallelism(1);
    env.execute("flink_hudi_compaction");
    assertTrue(table.getMetaClient().reloadActiveTimeline().filterCompletedInstants().containsInstant(instant.requestedTime()));
  }
}
/**
 * Executes {@code query} and compares the collected rows (stringified,
 * order-insensitive) against {@code expectedResult}. Collection runs on a worker
 * thread bounded to five seconds so unbounded streaming reads cannot hang the
 * test; set differences are logged before the final assertion to ease debugging.
 */
private void checkAnswer(String query, String... expectedResult) {
  TableResult actualResult = tEnv.executeSql(query);
  Set<String> expected = new HashSet<>(Arrays.asList(expectedResult));
  // Concurrent set: the collector thread may still be adding elements when the main
  // thread cancels it and reads the result; a plain HashSet would be a data race.
  Set<String> actual = java.util.concurrent.ConcurrentHashMap.newKeySet();
  // create a runnable to handle reads (especially useful for streaming reads as they are unbounded)
  Runnable runnable = () -> {
    try (CloseableIterator<Row> iterator = actualResult.collect()) {
      while (iterator.hasNext()) {
        actual.add(iterator.next().toString());
      }
    } catch (Exception e) {
      throw new RuntimeException(e);
    }
  };
  ExecutorService executor = Executors.newSingleThreadExecutor();
  Future<?> future = executor.submit(runnable); // parameterized; avoid the raw type
  try {
    // allow result collector to run for a short period of time
    future.get(5, TimeUnit.SECONDS);
  } catch (TimeoutException e) {
    // expected for unbounded streaming reads: stop collecting, compare what we have
    future.cancel(true);
  } catch (InterruptedException e) {
    Thread.currentThread().interrupt(); // restore the interrupt flag before rethrowing
    throw new RuntimeException(e);
  } catch (Exception e) {
    throw new RuntimeException(e);
  } finally {
    executor.shutdownNow();
  }
  for (String expectedItem : expected) {
    if (!actual.contains(expectedItem)) {
      LOG.info("Not in actual: {}", expectedItem);
    }
  }
  for (String actualItem : actual) {
    if (!expected.contains(actualItem)) {
      LOG.info("Not in expected: {}", actualItem);
    }
  }
  assertEquals(expected, new HashSet<>(actual));
}
/**
 * Lightweight builder for table option maps used in test DDL. {@link #toString()} renders
 * the options as a comma-separated {@code 'key' = 'value'} list suitable for a WITH clause.
 */
private static final class TableOptions {
  private final Map<String, String> options = new HashMap<>();

  /** Accepts alternating key/value arguments, e.g. {@code new TableOptions("k1", "v1", "k2", 2)}. */
  TableOptions(Object... kvPairs) {
    Preconditions.checkArgument(kvPairs.length % 2 == 0);
    for (int i = 0; i < kvPairs.length; i += 2) {
      withOption(kvPairs[i].toString(), kvPairs[i + 1]);
    }
  }

  /** Adds one option; the value is stored via {@code toString()}. */
  TableOptions withOption(String optionName, Object optionValue) {
    if (StringUtils.isNullOrEmpty(optionName)) {
      throw new IllegalArgumentException("optionName must be presented");
    }
    options.put(optionName, optionValue.toString());
    return this;
  }

  /** Converts the accumulated options into a Flink {@link Configuration}. */
  Configuration toConfig() {
    return FlinkOptions.fromMap(options);
  }

  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder();
    for (Map.Entry<String, String> entry : options.entrySet()) {
      if (sb.length() > 0) {
        sb.append(", ");
      }
      sb.append(String.format("'%s' = '%s'", entry.getKey(), entry.getValue()));
    }
    return sb.toString();
  }
}
/**
 * Bundles the three result views asserted by these tests: rows read on the evolved schema,
 * the same rows including the leading metadata column, and the streaming row-count changelog.
 */
private static final class ExpectedResult {
  final String[] evolvedRows;
  final String[] rowsWithMeta;
  final String[] rowCount;

  private ExpectedResult(String[] evolved, String[] withMeta, String[] countChangelog) {
    this.evolvedRows = evolved;
    this.rowsWithMeta = withMeta;
    this.rowCount = countChangelog;
  }
}
//TODO: null arrays have a single null row; array with null vs array with row will all null values
// Expected data for MERGED reads after schema evolution:
//   arg 1: rows projected on the evolved schema,
//   arg 2: the same rows prefixed with the record-key metadata column (id0..id9),
//   arg 3: the streaming changelog of a count(*) query (+I insert, +U/-U update pairs).
private static final ExpectedResult EXPECTED_MERGED_RESULT = new ExpectedResult(
new String[] {
"+I[Indica, null, 12, null, {Indica=1212.0}, [12.0], null, [+I[0, s0, , 0]], null, null, null]",
"+I[Danny, 10000.1, 23, +I[1, s1, 11, t1, drop_add1, 1], {Danny=2323.23}, [23.0, 23.0, 23.0], {Danny=+I[1, s1, 11, t1, drop_add1, 1]}, [+I[1, s1, , 1]], +I[1, 1], [1], {k1=v1}]",
"+I[Stephen, null, 33, +I[null, s2, 2, null, null, 2], {Stephen=3333.0}, [33.0], {Stephen=+I[null, s2, 2, null, null, 2]}, [+I[2, s2, , 2]], null, null, null]",
"+I[Julian, 30000.3, 53, +I[3, s3, 33, t3, drop_add3, 3], {Julian=5353.53}, [53.0], {Julian=+I[3, s3, 33, t3, drop_add3, 3]}, [+I[3, s3, , 3]], +I[3, 3], [3], {k3=v3}]",
"+I[Fabian, null, 31, +I[null, s4, 4, null, null, 4], {Fabian=3131.0}, [31.0], {Fabian=+I[null, s4, 4, null, null, 4]}, [+I[4, s4, , 4]], null, null, null]",
"+I[Sophia, null, 18, +I[null, s5, 5, null, null, 5], {Sophia=1818.0}, [18.0, 18.0], {Sophia=+I[null, s5, 5, null, null, 5]}, [+I[5, s5, , 5]], null, null, null]",
"+I[Emma, null, 20, +I[null, s6, 6, null, null, 6], {Emma=2020.0}, [20.0], {Emma=+I[null, s6, 6, null, null, 6]}, [+I[6, s6, , 6]], null, null, null]",
"+I[Bob, null, 44, +I[null, s7, 7, null, null, 7], {Bob=4444.0}, [44.0, 44.0], {Bob=+I[null, s7, 7, null, null, 7]}, [+I[7, s7, , 7]], null, null, null]",
"+I[Han, null, 56, +I[null, s8, 8, null, null, 8], {Han=5656.0}, [56.0, 56.0, 56.0], {Han=+I[null, s8, 8, null, null, 8]}, [+I[8, s8, , 8]], null, null, null]",
"+I[Alice, 90000.9, unknown, +I[9, s9, 99, t9, drop_add9, 9], {Alice=9999.99}, [9999.0, 9999.0], {Alice=+I[9, s9, 99, t9, drop_add9, 9]}, [+I[9, s9, , 9]], +I[9, 9], [9], {k9=v9}]",
},
new String[] {
"+I[id0, Indica, null, 12, null, {Indica=1212.0}, [12.0], null, [+I[0, s0, , 0]], null, null, null]",
"+I[id1, Danny, 10000.1, 23, +I[1, s1, 11, t1, drop_add1, 1], {Danny=2323.23}, [23.0, 23.0, 23.0], {Danny=+I[1, s1, 11, t1, drop_add1, 1]}, [+I[1, s1, , 1]], +I[1, 1], [1], {k1=v1}]",
"+I[id2, Stephen, null, 33, +I[null, s2, 2, null, null, 2], {Stephen=3333.0}, [33.0], {Stephen=+I[null, s2, 2, null, null, 2]}, [+I[2, s2, , 2]], null, null, null]",
"+I[id3, Julian, 30000.3, 53, +I[3, s3, 33, t3, drop_add3, 3], {Julian=5353.53}, [53.0], {Julian=+I[3, s3, 33, t3, drop_add3, 3]}, [+I[3, s3, , 3]], +I[3, 3], [3], {k3=v3}]",
"+I[id4, Fabian, null, 31, +I[null, s4, 4, null, null, 4], {Fabian=3131.0}, [31.0], {Fabian=+I[null, s4, 4, null, null, 4]}, [+I[4, s4, , 4]], null, null, null]",
"+I[id5, Sophia, null, 18, +I[null, s5, 5, null, null, 5], {Sophia=1818.0}, [18.0, 18.0], {Sophia=+I[null, s5, 5, null, null, 5]}, [+I[5, s5, , 5]], null, null, null]",
"+I[id6, Emma, null, 20, +I[null, s6, 6, null, null, 6], {Emma=2020.0}, [20.0], {Emma=+I[null, s6, 6, null, null, 6]}, [+I[6, s6, , 6]], null, null, null]",
"+I[id7, Bob, null, 44, +I[null, s7, 7, null, null, 7], {Bob=4444.0}, [44.0, 44.0], {Bob=+I[null, s7, 7, null, null, 7]}, [+I[7, s7, , 7]], null, null, null]",
"+I[id8, Han, null, 56, +I[null, s8, 8, null, null, 8], {Han=5656.0}, [56.0, 56.0, 56.0], {Han=+I[null, s8, 8, null, null, 8]}, [+I[8, s8, , 8]], null, null, null]",
"+I[id9, Alice, 90000.9, unknown, +I[9, s9, 99, t9, drop_add9, 9], {Alice=9999.99}, [9999.0, 9999.0], {Alice=+I[9, s9, 99, t9, drop_add9, 9]}, [+I[9, s9, , 9]], +I[9, 9], [9], {k9=v9}]"
},
// count(*) changelog: merged reads emit one retraction/update pair per additional row.
new String[] {
"+I[1]",
"+U[2]",
"+U[3]",
"+U[4]",
"+U[5]",
"+U[6]",
"+U[7]",
"+U[8]",
"+U[9]",
"+U[10]",
"-U[1]",
"-U[2]",
"-U[3]",
"-U[4]",
"-U[5]",
"-U[6]",
"-U[7]",
"-U[8]",
"-U[9]",
}
);
// Expected data for UNMERGED reads: base rows and later updates (Danny/Julian) appear as
// separate result rows rather than being merged, so the count also climbs past 10.
// Same three-view layout as EXPECTED_MERGED_RESULT.
private static final ExpectedResult EXPECTED_UNMERGED_RESULT = new ExpectedResult(
new String[] {
"+I[Indica, null, 12, null, {Indica=1212.0}, [12.0], null, [+I[0, s0, , 0]], null, null, null]",
"+I[Danny, null, 23, +I[null, s1, 1, null, null, 1], {Danny=2323.0}, [23.0, 23.0], {Danny=+I[null, s1, 1, null, null, 1]}, [+I[1, s1, , 1]], null, null, null]",
"+I[Stephen, null, 33, +I[null, s2, 2, null, null, 2], {Stephen=3333.0}, [33.0], {Stephen=+I[null, s2, 2, null, null, 2]}, [+I[2, s2, , 2]], null, null, null]",
"+I[Julian, null, 53, +I[null, s3, 3, null, null, 3], {Julian=5353.0}, [53.0, 53.0], {Julian=+I[null, s3, 3, null, null, 3]}, [+I[3, s3, , 3]], null, null, null]",
"+I[Fabian, null, 31, +I[null, s4, 4, null, null, 4], {Fabian=3131.0}, [31.0], {Fabian=+I[null, s4, 4, null, null, 4]}, [+I[4, s4, , 4]], null, null, null]",
"+I[Sophia, null, 18, +I[null, s5, 5, null, null, 5], {Sophia=1818.0}, [18.0, 18.0], {Sophia=+I[null, s5, 5, null, null, 5]}, [+I[5, s5, , 5]], null, null, null]",
"+I[Emma, null, 20, +I[null, s6, 6, null, null, 6], {Emma=2020.0}, [20.0], {Emma=+I[null, s6, 6, null, null, 6]}, [+I[6, s6, , 6]], null, null, null]",
"+I[Bob, null, 44, +I[null, s7, 7, null, null, 7], {Bob=4444.0}, [44.0, 44.0], {Bob=+I[null, s7, 7, null, null, 7]}, [+I[7, s7, , 7]], null, null, null]",
"+I[Han, null, 56, +I[null, s8, 8, null, null, 8], {Han=5656.0}, [56.0, 56.0, 56.0], {Han=+I[null, s8, 8, null, null, 8]}, [+I[8, s8, , 8]], null, null, null]",
"+I[Alice, 90000.9, unknown, +I[9, s9, 99, t9, drop_add9, 9], {Alice=9999.99}, [9999.0, 9999.0], {Alice=+I[9, s9, 99, t9, drop_add9, 9]}, [+I[9, s9, , 9]], +I[9, 9], [9], {k9=v9}]",
"+I[Danny, 10000.1, 23, +I[1, s1, 11, t1, drop_add1, 1], {Danny=2323.23}, [23.0, 23.0, 23.0], {Danny=+I[1, s1, 11, t1, drop_add1, 1]}, [+I[1, s1, , 1]], +I[1, 1], [1], {k1=v1}]",
"+I[Julian, 30000.3, 53, +I[3, s3, 33, t3, drop_add3, 3], {Julian=5353.53}, [53.0], {Julian=+I[3, s3, 33, t3, drop_add3, 3]}, [+I[3, s3, , 3]], +I[3, 3], [3], {k3=v3}]"
},
new String[] {
"+I[id0, Indica, null, 12, null, {Indica=1212.0}, [12.0], null, [+I[0, s0, , 0]], null, null, null]",
"+I[id1, Danny, null, 23, +I[null, s1, 1, null, null, 1], {Danny=2323.0}, [23.0, 23.0], {Danny=+I[null, s1, 1, null, null, 1]}, [+I[1, s1, , 1]], null, null, null]",
"+I[id2, Stephen, null, 33, +I[null, s2, 2, null, null, 2], {Stephen=3333.0}, [33.0], {Stephen=+I[null, s2, 2, null, null, 2]}, [+I[2, s2, , 2]], null, null, null]",
"+I[id3, Julian, null, 53, +I[null, s3, 3, null, null, 3], {Julian=5353.0}, [53.0, 53.0], {Julian=+I[null, s3, 3, null, null, 3]}, [+I[3, s3, , 3]], null, null, null]",
"+I[id4, Fabian, null, 31, +I[null, s4, 4, null, null, 4], {Fabian=3131.0}, [31.0], {Fabian=+I[null, s4, 4, null, null, 4]}, [+I[4, s4, , 4]], null, null, null]",
"+I[id5, Sophia, null, 18, +I[null, s5, 5, null, null, 5], {Sophia=1818.0}, [18.0, 18.0], {Sophia=+I[null, s5, 5, null, null, 5]}, [+I[5, s5, , 5]], null, null, null]",
"+I[id6, Emma, null, 20, +I[null, s6, 6, null, null, 6], {Emma=2020.0}, [20.0], {Emma=+I[null, s6, 6, null, null, 6]}, [+I[6, s6, , 6]], null, null, null]",
"+I[id7, Bob, null, 44, +I[null, s7, 7, null, null, 7], {Bob=4444.0}, [44.0, 44.0], {Bob=+I[null, s7, 7, null, null, 7]}, [+I[7, s7, , 7]], null, null, null]",
"+I[id8, Han, null, 56, +I[null, s8, 8, null, null, 8], {Han=5656.0}, [56.0, 56.0, 56.0], {Han=+I[null, s8, 8, null, null, 8]}, [+I[8, s8, , 8]], null, null, null]",
"+I[id9, Alice, 90000.9, unknown, +I[9, s9, 99, t9, drop_add9, 9], {Alice=9999.99}, [9999.0, 9999.0], {Alice=+I[9, s9, 99, t9, drop_add9, 9]}, [+I[9, s9, , 9]], +I[9, 9], [9], {k9=v9}]",
"+I[id1, Danny, 10000.1, 23, +I[1, s1, 11, t1, drop_add1, 1], {Danny=2323.23}, [23.0, 23.0, 23.0], {Danny=+I[1, s1, 11, t1, drop_add1, 1]}, [+I[1, s1, , 1]], +I[1, 1], [1], {k1=v1}]",
"+I[id3, Julian, 30000.3, 53, +I[3, s3, 33, t3, drop_add3, 3], {Julian=5353.53}, [53.0], {Julian=+I[3, s3, 33, t3, drop_add3, 3]}, [+I[3, s3, , 3]], +I[3, 3], [3], {k3=v3}]"
},
// count(*) changelog: climbs to 12 because unmerged reads surface the two update rows too.
new String[] {
"+I[1]",
"+U[2]",
"+U[3]",
"+U[4]",
"+U[5]",
"+U[6]",
"+U[7]",
"+U[8]",
"+U[9]",
"+U[10]",
"-U[10]",
"+U[11]",
"-U[11]",
"+U[12]",
"-U[1]",
"-U[2]",
"-U[3]",
"-U[4]",
"-U[5]",
"-U[6]",
"-U[7]",
"-U[8]",
"-U[9]",
}
);
}
|
apache/kylin | 38,230 | src/query/src/test/java/org/apache/kylin/query/util/RexToTblColRefTranslatorTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kylin.query.util;
import java.io.File;
import java.io.IOException;
import java.math.BigDecimal;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.stream.IntStream;
import org.apache.calcite.avatica.util.TimeUnit;
import org.apache.calcite.avatica.util.TimeUnitRange;
import org.apache.calcite.jdbc.JavaTypeFactoryImpl;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rel.type.RelDataTypeFactory;
import org.apache.calcite.rel.type.RelDataTypeSystem;
import org.apache.calcite.rex.RexBuilder;
import org.apache.calcite.rex.RexCall;
import org.apache.calcite.rex.RexInputRef;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.rex.RexToSqlNodeConverter;
import org.apache.calcite.rex.RexUtil;
import org.apache.calcite.sql.SqlIntervalQualifier;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.fun.SqlStdOperatorTable;
import org.apache.calcite.sql.parser.SqlParserPos;
import org.apache.calcite.sql.type.SqlTypeName;
import org.apache.commons.io.FileUtils;
import org.apache.kylin.common.KylinConfig;
import org.apache.kylin.common.util.Pair;
import org.apache.kylin.guava30.shaded.common.collect.ImmutableList;
import org.apache.kylin.guava30.shaded.common.collect.Lists;
import org.apache.kylin.guava30.shaded.common.collect.Maps;
import org.apache.kylin.guava30.shaded.common.collect.Sets;
import org.apache.kylin.metadata.model.TblColRef;
import org.apache.kylin.query.relnode.ColumnRowType;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
public class RexToTblColRefTranslatorTest {
// Shared Calcite factories for building RelDataTypes and RexNodes under test.
private static final RelDataTypeFactory TYPE_FACTORY = new JavaTypeFactoryImpl(RelDataTypeSystem.DEFAULT);
private static final RexBuilder REX_BUILDER = new RexBuilder(TYPE_FACTORY);
// Basic SQL types reused across tests; the non-final ones are reset to null in testDown().
private RelDataType boolRelDataType = TYPE_FACTORY.createSqlType(SqlTypeName.BOOLEAN);
private RelDataType timestampRelDataType = TYPE_FACTORY.createSqlType(SqlTypeName.TIMESTAMP);
private final RelDataType bingIntRelDataType = TYPE_FACTORY.createSqlType(SqlTypeName.BIGINT);
// Interval qualifiers covering every TIMESTAMPDIFF/TIMESTAMPADD time unit exercised below.
private final SqlIntervalQualifier second = new SqlIntervalQualifier(TimeUnit.SECOND, null, SqlParserPos.ZERO);
private final SqlIntervalQualifier minute = new SqlIntervalQualifier(TimeUnit.MINUTE, null, SqlParserPos.ZERO);
private final SqlIntervalQualifier hour = new SqlIntervalQualifier(TimeUnit.HOUR, null, SqlParserPos.ZERO);
private final SqlIntervalQualifier day = new SqlIntervalQualifier(TimeUnit.DAY, null, SqlParserPos.ZERO);
private final SqlIntervalQualifier week = new SqlIntervalQualifier(TimeUnit.WEEK, null, SqlParserPos.ZERO);
private final SqlIntervalQualifier month = new SqlIntervalQualifier(TimeUnit.MONTH, null, SqlParserPos.ZERO);
private final SqlIntervalQualifier quarter = new SqlIntervalQualifier(TimeUnit.QUARTER, null, SqlParserPos.ZERO);
private final SqlIntervalQualifier year = new SqlIntervalQualifier(TimeUnit.YEAR, null, SqlParserPos.ZERO);
// Input refs $0..$2 and the string literals '1'..'3'; all initialized in setUp().
private RexNode x, y, z;
private RexNode literalOne, literalTwo, literalThree;
// Lines written to the temporary kylin.properties (empty list by default).
private final List<String> properties = Lists.newArrayList();
@Before
public void setUp() throws IOException {
    // Build a throw-away KYLIN_CONF directory: <tmp>/metadata plus an empty kylin.properties.
    File confDir = File.createTempFile("RexToTblColRefTranslatorTest", "");
    FileUtils.deleteQuietly(confDir);
    FileUtils.forceMkdir(confDir);
    File metadataDir = new File(confDir.getAbsolutePath() + "/metadata");
    FileUtils.forceMkdir(metadataDir);
    File kylinProperties = new File(confDir, "kylin.properties");
    confDir.deleteOnExit();
    FileUtils.touch(kylinProperties);
    FileUtils.writeLines(kylinProperties, properties);
    // Implicitly sets KYLIN_CONF to the directory created above.
    KylinConfig.setKylinConfigForLocalTest(confDir.getCanonicalPath());
    // Nullable BOOLEAN input refs $0, $1, $2 and three string literals for predicates.
    RelDataType nullableBool = TYPE_FACTORY.createTypeWithNullability(boolRelDataType, true);
    x = new RexInputRef(0, nullableBool);
    y = new RexInputRef(1, nullableBool);
    z = new RexInputRef(2, nullableBool);
    literalOne = REX_BUILDER.makeLiteral("1");
    literalTwo = REX_BUILDER.makeLiteral("2");
    literalThree = REX_BUILDER.makeLiteral("3");
}
@After
public void testDown() {
    // Drop the per-test fixtures so each test starts from a clean slate.
    boolRelDataType = null;
    timestampRelDataType = null;
    x = null;
    y = null;
    z = null;
}
/** Builds the predicate {@code lhs < rhs}. */
private RexNode lessThan(RexNode lhs, RexNode rhs) {
    return REX_BUILDER.makeCall(SqlStdOperatorTable.LESS_THAN, lhs, rhs);
}
/** Builds the predicate {@code lhs > rhs}. */
private RexNode greaterThan(RexNode lhs, RexNode rhs) {
    return REX_BUILDER.makeCall(SqlStdOperatorTable.GREATER_THAN, lhs, rhs);
}
@Test
public void testTransformRexNode() {
    RexNode cond0 = greaterThan(x, literalOne);
    RexNode cond1 = lessThan(y, literalTwo);
    RexNode cond2 = greaterThan(z, literalThree);
    // Case 1: a nested AND is flattened by Calcite and re-nested left-deep by createLeftCall.
    RexNode structuredAnd = REX_BUILDER.makeCall(SqlStdOperatorTable.AND,
            REX_BUILDER.makeCall(SqlStdOperatorTable.AND, cond0, cond1), cond2);
    RexNode flatAnd = RexUtil.flatten(REX_BUILDER, structuredAnd);
    RexNode rebuiltAnd = RexToTblColRefTranslator.createLeftCall(flatAnd);
    Assert.assertEquals("AND(AND(>($0, '1'), <($1, '2')), >($2, '3'))", structuredAnd.toString());
    Assert.assertEquals("AND(>($0, '1'), <($1, '2'), >($2, '3'))", flatAnd.toString());
    Assert.assertEquals("AND(AND(>($0, '1'), <($1, '2')), >($2, '3'))", rebuiltAnd.toString());
    // Case 2: the same round trip for OR.
    RexNode structuredOr = REX_BUILDER.makeCall(SqlStdOperatorTable.OR,
            REX_BUILDER.makeCall(SqlStdOperatorTable.OR, cond0, cond1), cond2);
    Assert.assertEquals("OR(OR(>($0, '1'), <($1, '2')), >($2, '3'))", structuredOr.toString());
    RexNode flatOr = RexUtil.flatten(REX_BUILDER, structuredOr);
    RexNode rebuiltOr = RexToTblColRefTranslator.createLeftCall(flatOr);
    Assert.assertEquals("OR(>($0, '1'), <($1, '2'), >($2, '3'))", flatOr.toString());
    Assert.assertEquals("OR(OR(>($0, '1'), <($1, '2')), >($2, '3'))", rebuiltOr.toString());
    // Case 3: an OR of binary ANDs is already binary, so flatten and createLeftCall are no-ops.
    RexNode mixed = REX_BUILDER.makeCall(SqlStdOperatorTable.OR,
            REX_BUILDER.makeCall(SqlStdOperatorTable.AND, cond0, cond1),
            REX_BUILDER.makeCall(SqlStdOperatorTable.AND, cond1, cond2));
    RexNode mixedFlattened = RexUtil.flatten(REX_BUILDER, mixed);
    RexNode mixedRebuilt = RexToTblColRefTranslator.createLeftCall(mixedFlattened);
    String expected = "OR(AND(>($0, '1'), <($1, '2')), AND(<($1, '2'), >($2, '3')))";
    Assert.assertEquals(expected, mixed.toString());
    Assert.assertEquals(expected, mixedFlattened.toString());
    Assert.assertEquals(expected, mixedRebuilt.toString());
}
/**
 * Verifies {@code timestampdiff(second, col1, col2)} whose RexNode form is
 * {@code /(Reinterpret(-($0, $1)), 1000)}.
 */
@Test
public void testTimestampDiffWithTimeUnitSecond() {
    Map<RexNode, TblColRef> rexToColMap = Maps.newHashMap();
    prepareRexToTblColRefOfTimestamp(rexToColMap);
    // Reinterpret the timestamp difference as a SECOND interval.
    RexNode minusDate = REX_BUILDER.makeCall(TYPE_FACTORY.createSqlIntervalType(second),
            SqlStdOperatorTable.MINUS_DATE, createStableRexNodes(rexToColMap));
    RexNode reinterpret = REX_BUILDER.makeReinterpretCast(bingIntRelDataType, minusDate, x);
    // Divide by the number of milliseconds per second.
    RexCall divideCall = (RexCall) REX_BUILDER.makeCall(bingIntRelDataType, SqlStdOperatorTable.DIVIDE_DATE,
            Lists.newArrayList(reinterpret, REX_BUILDER.makeBigintLiteral(new BigDecimal(1000))));
    RexToTblColRefTranslator translator = new RexToTblColRefTranslator(
            Sets.newHashSet(rexToColMap.values()), rexToColMap);
    RexToSqlNodeConverter converter = translator.new ExtendedRexToSqlNodeConverter(
            new RexToTblColRefTranslator.OlapRexSqlStdConvertletTable(divideCall, Maps.newHashMap()));
    check("TIMESTAMPDIFF(SECOND, `T_1_CED5FEB`.`TIME1`, `T_1_CED5FEB`.`TIME0`)",
            converter.convertCall(divideCall));
}
/**
 * Verifies {@code timestampdiff(minute, col1, col2)} whose RexNode form is
 * {@code /(Reinterpret(-($0, $1)), 60000)}.
 */
@Test
public void testTimestampDiffWithTimeUnitMinute() {
    Map<RexNode, TblColRef> rexToColMap = Maps.newHashMap();
    prepareRexToTblColRefOfTimestamp(rexToColMap);
    // Reinterpret the timestamp difference as a MINUTE interval.
    RexNode minusDate = REX_BUILDER.makeCall(TYPE_FACTORY.createSqlIntervalType(minute),
            SqlStdOperatorTable.MINUS_DATE, createStableRexNodes(rexToColMap));
    RexNode reinterpret = REX_BUILDER.makeReinterpretCast(bingIntRelDataType, minusDate, x);
    // Divide by the number of milliseconds per minute.
    RexCall divideCall = (RexCall) REX_BUILDER.makeCall(bingIntRelDataType, SqlStdOperatorTable.DIVIDE_DATE,
            Lists.newArrayList(reinterpret, REX_BUILDER.makeBigintLiteral(new BigDecimal(60000))));
    RexToTblColRefTranslator translator = new RexToTblColRefTranslator(
            Sets.newHashSet(rexToColMap.values()), rexToColMap);
    RexToSqlNodeConverter converter = translator.new ExtendedRexToSqlNodeConverter(
            new RexToTblColRefTranslator.OlapRexSqlStdConvertletTable(divideCall, Maps.newHashMap()));
    check("TIMESTAMPDIFF(MINUTE, `T_1_CED5FEB`.`TIME1`, `T_1_CED5FEB`.`TIME0`)",
            converter.convertCall(divideCall));
}
/**
 * Verifies {@code timestampdiff(hour, col1, col2)} whose RexNode form is
 * {@code /(Reinterpret(-($0, $1)), 3600000)}.
 */
@Test
public void testTimestampDiffWithTimeUnitHour() {
    Map<RexNode, TblColRef> rexToColMap = Maps.newHashMap();
    prepareRexToTblColRefOfTimestamp(rexToColMap);
    // Reinterpret the timestamp difference as an HOUR interval.
    RexNode minusDate = REX_BUILDER.makeCall(TYPE_FACTORY.createSqlIntervalType(hour),
            SqlStdOperatorTable.MINUS_DATE, createStableRexNodes(rexToColMap));
    RexNode reinterpret = REX_BUILDER.makeReinterpretCast(bingIntRelDataType, minusDate, x);
    // Divide by the number of milliseconds per hour.
    RexCall divideCall = (RexCall) REX_BUILDER.makeCall(bingIntRelDataType, SqlStdOperatorTable.DIVIDE_DATE,
            Lists.newArrayList(reinterpret, REX_BUILDER.makeBigintLiteral(new BigDecimal(3600000))));
    RexToTblColRefTranslator translator = new RexToTblColRefTranslator(
            Sets.newHashSet(rexToColMap.values()), rexToColMap);
    RexToSqlNodeConverter converter = translator.new ExtendedRexToSqlNodeConverter(
            new RexToTblColRefTranslator.OlapRexSqlStdConvertletTable(divideCall, Maps.newHashMap()));
    check("TIMESTAMPDIFF(HOUR, `T_1_CED5FEB`.`TIME1`, `T_1_CED5FEB`.`TIME0`)",
            converter.convertCall(divideCall));
}
/**
 * Verifies {@code timestampdiff(day, col1, col2)} whose RexNode form is
 * {@code /(Reinterpret(-($0, $1)), 86400000)}.
 */
@Test
public void testTimestampDiffWithTimeUnitDay() {
    Map<RexNode, TblColRef> rexToColMap = Maps.newHashMap();
    prepareRexToTblColRefOfTimestamp(rexToColMap);
    // Reinterpret the timestamp difference as a DAY interval.
    RexNode minusDate = REX_BUILDER.makeCall(TYPE_FACTORY.createSqlIntervalType(day),
            SqlStdOperatorTable.MINUS_DATE, createStableRexNodes(rexToColMap));
    RexNode reinterpret = REX_BUILDER.makeReinterpretCast(bingIntRelDataType, minusDate, x);
    // Divide by the number of milliseconds per day.
    RexCall divideCall = (RexCall) REX_BUILDER.makeCall(bingIntRelDataType, SqlStdOperatorTable.DIVIDE_DATE,
            Lists.newArrayList(reinterpret, REX_BUILDER.makeBigintLiteral(new BigDecimal(86400000))));
    RexToTblColRefTranslator translator = new RexToTblColRefTranslator(
            Sets.newHashSet(rexToColMap.values()), rexToColMap);
    RexToSqlNodeConverter converter = translator.new ExtendedRexToSqlNodeConverter(
            new RexToTblColRefTranslator.OlapRexSqlStdConvertletTable(divideCall, Maps.newHashMap()));
    check("TIMESTAMPDIFF(DAY, `T_1_CED5FEB`.`TIME1`, `T_1_CED5FEB`.`TIME0`)",
            converter.convertCall(divideCall));
}
/**
 * Verifies {@code timestampdiff(week, col1, col2)} whose RexNode form is
 * {@code /(/(Reinterpret(-($0, $1)), 1000), 604800)}.
 */
@Test
public void testTimestampDiffWithTimeUnitWeek() {
    Map<RexNode, TblColRef> rexToColMap = Maps.newHashMap();
    prepareRexToTblColRefOfTimestamp(rexToColMap);
    // Reinterpret the timestamp difference as a SECOND interval (weeks are derived from seconds).
    RexNode minusDate = REX_BUILDER.makeCall(TYPE_FACTORY.createSqlIntervalType(second),
            SqlStdOperatorTable.MINUS_DATE, createStableRexNodes(rexToColMap));
    RexNode reinterpret = REX_BUILDER.makeReinterpretCast(bingIntRelDataType, minusDate, x);
    // First scale millis to seconds, then divide by the number of seconds per week.
    RexNode secondsCall = REX_BUILDER.makeCall(bingIntRelDataType, SqlStdOperatorTable.DIVIDE_DATE,
            Lists.newArrayList(reinterpret, REX_BUILDER.makeLiteral("1000")));
    RexCall divideCall = (RexCall) REX_BUILDER.makeCall(bingIntRelDataType, SqlStdOperatorTable.DIVIDE_DATE,
            Lists.newArrayList(secondsCall, REX_BUILDER.makeBigintLiteral(new BigDecimal(604800))));
    RexToTblColRefTranslator translator = new RexToTblColRefTranslator(
            Sets.newHashSet(rexToColMap.values()), rexToColMap);
    RexToSqlNodeConverter converter = translator.new ExtendedRexToSqlNodeConverter(
            new RexToTblColRefTranslator.OlapRexSqlStdConvertletTable(divideCall, Maps.newHashMap()));
    check("TIMESTAMPDIFF(WEEK, `T_1_CED5FEB`.`TIME1`, `T_1_CED5FEB`.`TIME0`)",
            converter.convertCall(divideCall));
}
/**
 * Verifies {@code timestampdiff(month, col1, col2)} whose RexNode form is
 * {@code Reinterpret(-($0, $1))}.
 */
@Test
public void testTimestampDiffWithTimeUnitMonth() {
    Map<RexNode, TblColRef> rexToColMap = Maps.newHashMap();
    prepareRexToTblColRefOfTimestamp(rexToColMap);
    RexNode minusDate = REX_BUILDER.makeCall(TYPE_FACTORY.createSqlIntervalType(month),
            SqlStdOperatorTable.MINUS_DATE, createStableRexNodes(rexToColMap));
    // The month difference needs no extra scaling: the reinterpret cast is the whole call.
    RexCall reinterpret = (RexCall) REX_BUILDER.makeReinterpretCast(bingIntRelDataType, minusDate, x);
    RexToTblColRefTranslator translator = new RexToTblColRefTranslator(
            Sets.newHashSet(rexToColMap.values()), rexToColMap);
    RexToSqlNodeConverter converter = translator.new ExtendedRexToSqlNodeConverter(
            new RexToTblColRefTranslator.OlapRexSqlStdConvertletTable(reinterpret, Maps.newHashMap()));
    check("TIMESTAMPDIFF(MONTH, `T_1_CED5FEB`.`TIME1`, `T_1_CED5FEB`.`TIME0`)",
            converter.convertCall(reinterpret));
}
/**
 * Verifies {@code timestampdiff(quarter, col1, col2)} whose RexNode form is
 * {@code /(Reinterpret(-($0, $1)), 3)}.
 */
@Test
public void testTimestampDiffWithTimeUnitQuarter() {
    Map<RexNode, TblColRef> rexToColMap = Maps.newHashMap();
    prepareRexToTblColRefOfTimestamp(rexToColMap);
    // Quarters are derived from the MONTH interval difference.
    RexNode minusDate = REX_BUILDER.makeCall(TYPE_FACTORY.createSqlIntervalType(month),
            SqlStdOperatorTable.MINUS_DATE, createStableRexNodes(rexToColMap));
    RexNode reinterpret = REX_BUILDER.makeReinterpretCast(bingIntRelDataType, minusDate, x);
    // Divide by three months per quarter.
    RexCall divideCall = (RexCall) REX_BUILDER.makeCall(bingIntRelDataType, SqlStdOperatorTable.DIVIDE_DATE,
            Lists.newArrayList(reinterpret, REX_BUILDER.makeBigintLiteral(new BigDecimal(3))));
    RexToTblColRefTranslator translator = new RexToTblColRefTranslator(
            Sets.newHashSet(rexToColMap.values()), rexToColMap);
    RexToSqlNodeConverter converter = translator.new ExtendedRexToSqlNodeConverter(
            new RexToTblColRefTranslator.OlapRexSqlStdConvertletTable(divideCall, Maps.newHashMap()));
    check("TIMESTAMPDIFF(QUARTER, `T_1_CED5FEB`.`TIME1`, `T_1_CED5FEB`.`TIME0`)",
            converter.convertCall(divideCall));
}
/**
 * Verifies {@code timestampdiff(year, col1, col2)} whose RexNode form is
 * {@code /(Reinterpret(-($0, $1)), 12)}.
 */
@Test
public void testTimestampDiffWithTimeUnitYear() {
    Map<RexNode, TblColRef> rexToColMap = Maps.newHashMap();
    prepareRexToTblColRefOfTimestamp(rexToColMap);
    RexNode minusDate = REX_BUILDER.makeCall(TYPE_FACTORY.createSqlIntervalType(year),
            SqlStdOperatorTable.MINUS_DATE, createStableRexNodes(rexToColMap));
    RexNode reinterpret = REX_BUILDER.makeReinterpretCast(bingIntRelDataType, minusDate, x);
    // Divide by twelve months per year.
    RexCall divideCall = (RexCall) REX_BUILDER.makeCall(bingIntRelDataType, SqlStdOperatorTable.DIVIDE_DATE,
            Lists.newArrayList(reinterpret, REX_BUILDER.makeBigintLiteral(new BigDecimal(12))));
    RexToTblColRefTranslator translator = new RexToTblColRefTranslator(
            Sets.newHashSet(rexToColMap.values()), rexToColMap);
    RexToSqlNodeConverter converter = translator.new ExtendedRexToSqlNodeConverter(
            new RexToTblColRefTranslator.OlapRexSqlStdConvertletTable(divideCall, Maps.newHashMap()));
    check("TIMESTAMPDIFF(YEAR, `T_1_CED5FEB`.`TIME1`, `T_1_CED5FEB`.`TIME0`)",
            converter.convertCall(divideCall));
}
/**
 * Verifies {@code timestampadd(second, 1, col2)} whose RexNode form is
 * {@code DATETIME_PLUS($0, 1000)}.
 */
@Test
public void testTimestampAddWithTimeUnitSecond() {
    Map<RexNode, TblColRef> rexToColMap = Maps.newHashMap();
    prepareRexToTblColRefOfTimestamp(rexToColMap);
    List<RexNode> stableRexNodes = createStableRexNodes(rexToColMap);
    // One second expressed as a 1000 ms interval literal added onto $0.
    RexCall plusCall = (RexCall) REX_BUILDER.makeCall(timestampRelDataType, SqlStdOperatorTable.DATETIME_PLUS,
            Lists.newArrayList(stableRexNodes.get(0),
                    REX_BUILDER.makeIntervalLiteral(new BigDecimal(1000), second)));
    RexToTblColRefTranslator translator = new RexToTblColRefTranslator(
            Sets.newHashSet(rexToColMap.values()), rexToColMap);
    RexToSqlNodeConverter converter = translator.new ExtendedRexToSqlNodeConverter(
            new RexToTblColRefTranslator.OlapRexSqlStdConvertletTable(plusCall, Maps.newHashMap()));
    check("TIMESTAMPADD(SECOND, 1, `T_1_CED5FEB`.`TIME0`)", converter.convertCall(plusCall));
}
/**
 * Verifies {@code timestampadd(minute, 1, col2)} whose RexNode form is
 * {@code DATETIME_PLUS($0, 60000)}.
 */
@Test
public void testTimestampAddWithTimeUnitMinute() {
    Map<RexNode, TblColRef> rexToColMap = Maps.newHashMap();
    prepareRexToTblColRefOfTimestamp(rexToColMap);
    List<RexNode> stableRexNodes = createStableRexNodes(rexToColMap);
    // One minute expressed as a 60000 ms interval literal added onto $0.
    RexCall plusCall = (RexCall) REX_BUILDER.makeCall(timestampRelDataType, SqlStdOperatorTable.DATETIME_PLUS,
            Lists.newArrayList(stableRexNodes.get(0),
                    REX_BUILDER.makeIntervalLiteral(new BigDecimal(60000), minute)));
    RexToTblColRefTranslator translator = new RexToTblColRefTranslator(
            Sets.newHashSet(rexToColMap.values()), rexToColMap);
    RexToSqlNodeConverter converter = translator.new ExtendedRexToSqlNodeConverter(
            new RexToTblColRefTranslator.OlapRexSqlStdConvertletTable(plusCall, Maps.newHashMap()));
    check("TIMESTAMPADD(MINUTE, 1, `T_1_CED5FEB`.`TIME0`)", converter.convertCall(plusCall));
}
/**
 * Verifies {@code timestampadd(hour, 1, col2)} whose RexNode form is
 * {@code DATETIME_PLUS($0, 3600000)}.
 */
@Test
public void testTimestampAddWithTimeUnitHour() {
    Map<RexNode, TblColRef> rexToColMap = Maps.newHashMap();
    prepareRexToTblColRefOfTimestamp(rexToColMap);
    List<RexNode> stableRexNodes = createStableRexNodes(rexToColMap);
    // One hour expressed as a 3600000 ms interval literal added onto $0.
    RexCall plusCall = (RexCall) REX_BUILDER.makeCall(timestampRelDataType, SqlStdOperatorTable.DATETIME_PLUS,
            Lists.newArrayList(stableRexNodes.get(0),
                    REX_BUILDER.makeIntervalLiteral(new BigDecimal(3600000), hour)));
    RexToTblColRefTranslator translator = new RexToTblColRefTranslator(
            Sets.newHashSet(rexToColMap.values()), rexToColMap);
    RexToSqlNodeConverter converter = translator.new ExtendedRexToSqlNodeConverter(
            new RexToTblColRefTranslator.OlapRexSqlStdConvertletTable(plusCall, Maps.newHashMap()));
    check("TIMESTAMPADD(HOUR, 1, `T_1_CED5FEB`.`TIME0`)", converter.convertCall(plusCall));
}
/**
 * Verifies {@code timestampadd(day, 1, col2)} whose RexNode form is
 * {@code DATETIME_PLUS($0, 86400000)}.
 */
@Test
public void testTimestampAddWithTimeUnitDay() {
    Map<RexNode, TblColRef> rexToColMap = Maps.newHashMap();
    prepareRexToTblColRefOfTimestamp(rexToColMap);
    List<RexNode> stableRexNodes = createStableRexNodes(rexToColMap);
    // One day expressed as an 86400000 ms interval literal added onto $0.
    RexCall plusCall = (RexCall) REX_BUILDER.makeCall(timestampRelDataType, SqlStdOperatorTable.DATETIME_PLUS,
            Lists.newArrayList(stableRexNodes.get(0),
                    REX_BUILDER.makeIntervalLiteral(new BigDecimal(86400000), day)));
    RexToTblColRefTranslator translator = new RexToTblColRefTranslator(
            Sets.newHashSet(rexToColMap.values()), rexToColMap);
    RexToSqlNodeConverter converter = translator.new ExtendedRexToSqlNodeConverter(
            new RexToTblColRefTranslator.OlapRexSqlStdConvertletTable(plusCall, Maps.newHashMap()));
    check("TIMESTAMPADD(DAY, 1, `T_1_CED5FEB`.`TIME0`)", converter.convertCall(plusCall));
}
/**
 * Verifies {@code timestampadd(week, 1, col2)} whose RexNode form is
 * {@code DATETIME_PLUS($0, 604800000)}.
 */
@Test
public void testTimestampAddWithTimeUnitWeek() {
    Map<RexNode, TblColRef> rexToColMap = Maps.newHashMap();
    prepareRexToTblColRefOfTimestamp(rexToColMap);
    List<RexNode> stableRexNodes = createStableRexNodes(rexToColMap);
    // One week expressed as a 604800000 ms interval literal added onto $0.
    RexCall plusCall = (RexCall) REX_BUILDER.makeCall(timestampRelDataType, SqlStdOperatorTable.DATETIME_PLUS,
            Lists.newArrayList(stableRexNodes.get(0),
                    REX_BUILDER.makeIntervalLiteral(new BigDecimal(604800000), week)));
    RexToTblColRefTranslator translator = new RexToTblColRefTranslator(
            Sets.newHashSet(rexToColMap.values()), rexToColMap);
    RexToSqlNodeConverter converter = translator.new ExtendedRexToSqlNodeConverter(
            new RexToTblColRefTranslator.OlapRexSqlStdConvertletTable(plusCall, Maps.newHashMap()));
    check("TIMESTAMPADD(WEEK, 1, `T_1_CED5FEB`.`TIME0`)", converter.convertCall(plusCall));
}
/**
 * Verifies {@code timestampadd(month, 1, col2)} whose RexNode form is
 * {@code DATETIME_PLUS($0, 1)}.
 */
@Test
public void testTimestampAddWithTimeUnitMonth() {
    Map<RexNode, TblColRef> rexToColMap = Maps.newHashMap();
    prepareRexToTblColRefOfTimestamp(rexToColMap);
    List<RexNode> stableRexNodes = createStableRexNodes(rexToColMap);
    // One month expressed directly as a month-unit interval literal added onto $0.
    RexCall plusCall = (RexCall) REX_BUILDER.makeCall(timestampRelDataType, SqlStdOperatorTable.DATETIME_PLUS,
            Lists.newArrayList(stableRexNodes.get(0),
                    REX_BUILDER.makeIntervalLiteral(new BigDecimal(1), month)));
    RexToTblColRefTranslator translator = new RexToTblColRefTranslator(
            Sets.newHashSet(rexToColMap.values()), rexToColMap);
    RexToSqlNodeConverter converter = translator.new ExtendedRexToSqlNodeConverter(
            new RexToTblColRefTranslator.OlapRexSqlStdConvertletTable(plusCall, Maps.newHashMap()));
    check("TIMESTAMPADD(MONTH, 1, `T_1_CED5FEB`.`TIME0`)", converter.convertCall(plusCall));
}
/**
 * Verifies {@code timestampadd(quarter, 1, col2)} whose RexNode form is
 * {@code DATETIME_PLUS($0, 3)}.
 */
@Test
public void testTimestampAddWithTimeUnitQuarter() {
    Map<RexNode, TblColRef> rexToColMap = Maps.newHashMap();
    prepareRexToTblColRefOfTimestamp(rexToColMap);
    List<RexNode> stableRexNodes = createStableRexNodes(rexToColMap);
    // One quarter expressed as a three-month interval literal added onto $0.
    RexCall plusCall = (RexCall) REX_BUILDER.makeCall(timestampRelDataType, SqlStdOperatorTable.DATETIME_PLUS,
            Lists.newArrayList(stableRexNodes.get(0),
                    REX_BUILDER.makeIntervalLiteral(new BigDecimal(3), quarter)));
    RexToTblColRefTranslator translator = new RexToTblColRefTranslator(
            Sets.newHashSet(rexToColMap.values()), rexToColMap);
    RexToSqlNodeConverter converter = translator.new ExtendedRexToSqlNodeConverter(
            new RexToTblColRefTranslator.OlapRexSqlStdConvertletTable(plusCall, Maps.newHashMap()));
    check("TIMESTAMPADD(QUARTER, 1, `T_1_CED5FEB`.`TIME0`)", converter.convertCall(plusCall));
}
/**
 * Verifies {@code timestampadd(year, 1, col2)} whose RexNode form is
 * {@code DATETIME_PLUS($0, 12)}.
 */
@Test
public void testTimestampAddWithTimeUnitYear() {
    Map<RexNode, TblColRef> rexToColMap = Maps.newHashMap();
    prepareRexToTblColRefOfTimestamp(rexToColMap);
    List<RexNode> stableRexNodes = createStableRexNodes(rexToColMap);
    // One year expressed as a twelve-month interval literal added onto $0.
    RexCall plusCall = (RexCall) REX_BUILDER.makeCall(timestampRelDataType, SqlStdOperatorTable.DATETIME_PLUS,
            Lists.newArrayList(stableRexNodes.get(0),
                    REX_BUILDER.makeIntervalLiteral(new BigDecimal(12), year)));
    RexToTblColRefTranslator translator = new RexToTblColRefTranslator(
            Sets.newHashSet(rexToColMap.values()), rexToColMap);
    RexToSqlNodeConverter converter = translator.new ExtendedRexToSqlNodeConverter(
            new RexToTblColRefTranslator.OlapRexSqlStdConvertletTable(plusCall, Maps.newHashMap()));
    check("TIMESTAMPADD(YEAR, 1, `T_1_CED5FEB`.`TIME0`)", converter.convertCall(plusCall));
}
/**
 * Verifies function year(col1) / extract(year from col1).
 * Input RexNode: EXTRACT(FLAG(YEAR), $0); expected SQL text is YEAR(col).
 */
@Test
public void testFunctionYear() {
    final Map<RexNode, TblColRef> rexToColMap = Maps.newHashMap();
    prepareRexToTblColRefOfTimestamp(rexToColMap);
    final RexNode tsColumn = createStableRexNodes(rexToColMap).get(0);
    final RexCall call = (RexCall) REX_BUILDER.makeCall(bingIntRelDataType, SqlStdOperatorTable.EXTRACT,
            Lists.newArrayList(REX_BUILDER.makeFlag(TimeUnitRange.YEAR), tsColumn));
    final RexToSqlNodeConverter converter = new RexToTblColRefTranslator(
            Sets.newHashSet(rexToColMap.values()), rexToColMap).new ExtendedRexToSqlNodeConverter(
                    new RexToTblColRefTranslator.OlapRexSqlStdConvertletTable(call, Maps.newHashMap()));
    check("YEAR(`T_1_CED5FEB`.`TIME0`)", converter.convertCall(call));
}
/**
 * Verifies function quarter(col1) / extract(quarter from col1).
 * Input RexNode: EXTRACT(FLAG(QUARTER), $0); expected SQL text is QUARTER(col).
 */
@Test
public void testFunctionQuarter() {
    final Map<RexNode, TblColRef> rexToColMap = Maps.newHashMap();
    prepareRexToTblColRefOfTimestamp(rexToColMap);
    final RexNode tsColumn = createStableRexNodes(rexToColMap).get(0);
    final RexCall call = (RexCall) REX_BUILDER.makeCall(bingIntRelDataType, SqlStdOperatorTable.EXTRACT,
            Lists.newArrayList(REX_BUILDER.makeFlag(TimeUnitRange.QUARTER), tsColumn));
    final RexToSqlNodeConverter converter = new RexToTblColRefTranslator(
            Sets.newHashSet(rexToColMap.values()), rexToColMap).new ExtendedRexToSqlNodeConverter(
                    new RexToTblColRefTranslator.OlapRexSqlStdConvertletTable(call, Maps.newHashMap()));
    check("QUARTER(`T_1_CED5FEB`.`TIME0`)", converter.convertCall(call));
}
/**
 * Verifies function month(col1) / extract(month from col1).
 * Input RexNode: EXTRACT(FLAG(MONTH), $0); expected SQL text is MONTH(col).
 */
@Test
public void testFunctionMonth() {
    final Map<RexNode, TblColRef> rexToColMap = Maps.newHashMap();
    prepareRexToTblColRefOfTimestamp(rexToColMap);
    final RexNode tsColumn = createStableRexNodes(rexToColMap).get(0);
    final RexCall call = (RexCall) REX_BUILDER.makeCall(bingIntRelDataType, SqlStdOperatorTable.EXTRACT,
            Lists.newArrayList(REX_BUILDER.makeFlag(TimeUnitRange.MONTH), tsColumn));
    final RexToSqlNodeConverter converter = new RexToTblColRefTranslator(
            Sets.newHashSet(rexToColMap.values()), rexToColMap).new ExtendedRexToSqlNodeConverter(
                    new RexToTblColRefTranslator.OlapRexSqlStdConvertletTable(call, Maps.newHashMap()));
    check("MONTH(`T_1_CED5FEB`.`TIME0`)", converter.convertCall(call));
}
/**
 * Verifies function week(col1) / extract(week from col1).
 * Input RexNode: EXTRACT(FLAG(WEEK), $0); expected SQL text is WEEK(col).
 */
@Test
public void testFunctionWeek() {
    final Map<RexNode, TblColRef> rexToColMap = Maps.newHashMap();
    prepareRexToTblColRefOfTimestamp(rexToColMap);
    final RexNode tsColumn = createStableRexNodes(rexToColMap).get(0);
    final RexCall call = (RexCall) REX_BUILDER.makeCall(bingIntRelDataType, SqlStdOperatorTable.EXTRACT,
            Lists.newArrayList(REX_BUILDER.makeFlag(TimeUnitRange.WEEK), tsColumn));
    final RexToSqlNodeConverter converter = new RexToTblColRefTranslator(
            Sets.newHashSet(rexToColMap.values()), rexToColMap).new ExtendedRexToSqlNodeConverter(
                    new RexToTblColRefTranslator.OlapRexSqlStdConvertletTable(call, Maps.newHashMap()));
    check("WEEK(`T_1_CED5FEB`.`TIME0`)", converter.convertCall(call));
}
/**
 * Verifies function dayofmonth(col1) / extract(day from col1).
 * Input RexNode: EXTRACT(FLAG(DAY), $0); expected SQL text is DAYOFMONTH(col).
 */
@Test
public void testFunctionDayOfMonth() {
    final Map<RexNode, TblColRef> rexToColMap = Maps.newHashMap();
    prepareRexToTblColRefOfTimestamp(rexToColMap);
    final RexNode tsColumn = createStableRexNodes(rexToColMap).get(0);
    final RexCall call = (RexCall) REX_BUILDER.makeCall(bingIntRelDataType, SqlStdOperatorTable.EXTRACT,
            Lists.newArrayList(REX_BUILDER.makeFlag(TimeUnitRange.DAY), tsColumn));
    final RexToSqlNodeConverter converter = new RexToTblColRefTranslator(
            Sets.newHashSet(rexToColMap.values()), rexToColMap).new ExtendedRexToSqlNodeConverter(
                    new RexToTblColRefTranslator.OlapRexSqlStdConvertletTable(call, Maps.newHashMap()));
    check("DAYOFMONTH(`T_1_CED5FEB`.`TIME0`)", converter.convertCall(call));
}
/**
 * Verifies function hour(col1) / extract(hour from col1).
 * Input RexNode: EXTRACT(FLAG(HOUR), $0); expected SQL text is HOUR(col).
 */
@Test
public void testFunctionHour() {
    final Map<RexNode, TblColRef> rexToColMap = Maps.newHashMap();
    prepareRexToTblColRefOfTimestamp(rexToColMap);
    final RexNode tsColumn = createStableRexNodes(rexToColMap).get(0);
    final RexCall call = (RexCall) REX_BUILDER.makeCall(bingIntRelDataType, SqlStdOperatorTable.EXTRACT,
            Lists.newArrayList(REX_BUILDER.makeFlag(TimeUnitRange.HOUR), tsColumn));
    final RexToSqlNodeConverter converter = new RexToTblColRefTranslator(
            Sets.newHashSet(rexToColMap.values()), rexToColMap).new ExtendedRexToSqlNodeConverter(
                    new RexToTblColRefTranslator.OlapRexSqlStdConvertletTable(call, Maps.newHashMap()));
    check("HOUR(`T_1_CED5FEB`.`TIME0`)", converter.convertCall(call));
}
/**
 * Verifies function minute(col1) / extract(minute from col1).
 * Input RexNode: EXTRACT(FLAG(MINUTE), $0); expected SQL text is MINUTE(col).
 */
@Test
public void testFunctionMinute() {
    final Map<RexNode, TblColRef> rexToColMap = Maps.newHashMap();
    prepareRexToTblColRefOfTimestamp(rexToColMap);
    final RexNode tsColumn = createStableRexNodes(rexToColMap).get(0);
    final RexCall call = (RexCall) REX_BUILDER.makeCall(bingIntRelDataType, SqlStdOperatorTable.EXTRACT,
            Lists.newArrayList(REX_BUILDER.makeFlag(TimeUnitRange.MINUTE), tsColumn));
    final RexToSqlNodeConverter converter = new RexToTblColRefTranslator(
            Sets.newHashSet(rexToColMap.values()), rexToColMap).new ExtendedRexToSqlNodeConverter(
                    new RexToTblColRefTranslator.OlapRexSqlStdConvertletTable(call, Maps.newHashMap()));
    check("MINUTE(`T_1_CED5FEB`.`TIME0`)", converter.convertCall(call));
}
/**
 * Verifies function second(col1) / extract(second from col1).
 * Input RexNode: EXTRACT(FLAG(SECOND), $0); expected SQL text is SECOND(col).
 */
@Test
public void testFunctionSecond() {
    final Map<RexNode, TblColRef> rexToColMap = Maps.newHashMap();
    prepareRexToTblColRefOfTimestamp(rexToColMap);
    final RexNode tsColumn = createStableRexNodes(rexToColMap).get(0);
    final RexCall call = (RexCall) REX_BUILDER.makeCall(bingIntRelDataType, SqlStdOperatorTable.EXTRACT,
            Lists.newArrayList(REX_BUILDER.makeFlag(TimeUnitRange.SECOND), tsColumn));
    final RexToSqlNodeConverter converter = new RexToTblColRefTranslator(
            Sets.newHashSet(rexToColMap.values()), rexToColMap).new ExtendedRexToSqlNodeConverter(
                    new RexToTblColRefTranslator.OlapRexSqlStdConvertletTable(call, Maps.newHashMap()));
    check("SECOND(`T_1_CED5FEB`.`TIME0`)", converter.convertCall(call));
}
/**
 * Verifies function dayofyear(col1) only — per the original note, Spark does
 * not support extract(doy from col1).
 * Input RexNode: EXTRACT(FLAG(DOY), $0); expected SQL text is DAYOFYEAR(col).
 */
@Test
public void testFunctionOfDayOfYear() {
    final Map<RexNode, TblColRef> rexToColMap = Maps.newHashMap();
    prepareRexToTblColRefOfTimestamp(rexToColMap);
    final RexNode tsColumn = createStableRexNodes(rexToColMap).get(0);
    final RexCall call = (RexCall) REX_BUILDER.makeCall(bingIntRelDataType, SqlStdOperatorTable.EXTRACT,
            Lists.newArrayList(REX_BUILDER.makeFlag(TimeUnitRange.DOY), tsColumn));
    final RexToSqlNodeConverter converter = new RexToTblColRefTranslator(
            Sets.newHashSet(rexToColMap.values()), rexToColMap).new ExtendedRexToSqlNodeConverter(
                    new RexToTblColRefTranslator.OlapRexSqlStdConvertletTable(call, Maps.newHashMap()));
    check("DAYOFYEAR(`T_1_CED5FEB`.`TIME0`)", converter.convertCall(call));
}
/**
 * Verifies function dayofweek(col1) only — per the original note, Spark does
 * not support extract(dow from col1).
 * Input RexNode: EXTRACT(FLAG(DOW), $0); expected SQL text is DAYOFWEEK(col).
 */
@Test
public void testFunctionDayOfWeek() {
    final Map<RexNode, TblColRef> rexToColMap = Maps.newHashMap();
    prepareRexToTblColRefOfTimestamp(rexToColMap);
    final RexNode tsColumn = createStableRexNodes(rexToColMap).get(0);
    final RexCall call = (RexCall) REX_BUILDER.makeCall(bingIntRelDataType, SqlStdOperatorTable.EXTRACT,
            Lists.newArrayList(REX_BUILDER.makeFlag(TimeUnitRange.DOW), tsColumn));
    final RexToSqlNodeConverter converter = new RexToTblColRefTranslator(
            Sets.newHashSet(rexToColMap.values()), rexToColMap).new ExtendedRexToSqlNodeConverter(
                    new RexToTblColRefTranslator.OlapRexSqlStdConvertletTable(call, Maps.newHashMap()));
    check("DAYOFWEEK(`T_1_CED5FEB`.`TIME0`)", converter.convertCall(call));
}
/**
 * Asserts that the converted SqlNode renders exactly as the expected SQL text.
 *
 * @param expectedStr expected SQL rendering
 * @param sqlNode     node produced by the converter under test
 */
private void check(String expectedStr, SqlNode sqlNode) {
    final String actual = sqlNode.toString();
    Assert.assertEquals(expectedStr, actual);
}
/**
 * Returns the map's RexNode keys in a deterministic order (sorted by their
 * string form), so tests can index columns stably regardless of hash order.
 */
private List<RexNode> createStableRexNodes(Map<RexNode, TblColRef> oriRexToTblColRefMap) {
    final List<RexNode> ordered = Lists.newArrayList(oriRexToTblColRefMap.keySet());
    ordered.sort(Comparator.comparing(RexNode::toString));
    return ordered;
}
/**
 * Populates the given map with one nullable-timestamp RexInputRef per column
 * of a mocked two-column table CALCS (alias T_1_CED5FEB: TIME0, TIME1).
 */
private void prepareRexToTblColRefOfTimestamp(Map<RexNode, TblColRef> rexNodeTblColRefMap) {
    ColumnRowType columnRowType = ColumnRowTypeMockUtil.mock("CALCS", "T_1_CED5FEB",
            ImmutableList.of(Pair.newPair("TIME0", "timestamp"),
                    Pair.newPair("TIME1", "timestamp")));
    for (int idx = 0; idx < columnRowType.getAllColumns().size(); idx++) {
        // Input ref $idx maps to the idx-th mocked column.
        RexNode key = new RexInputRef(idx, TYPE_FACTORY.createTypeWithNullability(timestampRelDataType, true));
        rexNodeTblColRefMap.put(key, columnRowType.getAllColumns().get(idx));
    }
}
}
|
googleapis/google-cloud-java | 37,746 | java-artifact-registry/proto-google-cloud-artifact-registry-v1/src/main/java/com/google/devtools/artifactregistry/v1/ListRepositoriesResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/devtools/artifactregistry/v1/repository.proto
// Protobuf Java Version: 3.25.8
package com.google.devtools.artifactregistry.v1;
/**
*
*
* <pre>
* The response from listing repositories.
* </pre>
*
* Protobuf type {@code google.devtools.artifactregistry.v1.ListRepositoriesResponse}
*/
public final class ListRepositoriesResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.devtools.artifactregistry.v1.ListRepositoriesResponse)
ListRepositoriesResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListRepositoriesResponse.newBuilder() to construct.
private ListRepositoriesResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListRepositoriesResponse() {
repositories_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListRepositoriesResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.devtools.artifactregistry.v1.RepositoryProto
.internal_static_google_devtools_artifactregistry_v1_ListRepositoriesResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.devtools.artifactregistry.v1.RepositoryProto
.internal_static_google_devtools_artifactregistry_v1_ListRepositoriesResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.devtools.artifactregistry.v1.ListRepositoriesResponse.class,
com.google.devtools.artifactregistry.v1.ListRepositoriesResponse.Builder.class);
}
public static final int REPOSITORIES_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.devtools.artifactregistry.v1.Repository> repositories_;
/**
*
*
* <pre>
* The repositories returned.
* </pre>
*
* <code>repeated .google.devtools.artifactregistry.v1.Repository repositories = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.devtools.artifactregistry.v1.Repository> getRepositoriesList() {
return repositories_;
}
/**
*
*
* <pre>
* The repositories returned.
* </pre>
*
* <code>repeated .google.devtools.artifactregistry.v1.Repository repositories = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.devtools.artifactregistry.v1.RepositoryOrBuilder>
getRepositoriesOrBuilderList() {
return repositories_;
}
/**
*
*
* <pre>
* The repositories returned.
* </pre>
*
* <code>repeated .google.devtools.artifactregistry.v1.Repository repositories = 1;</code>
*/
@java.lang.Override
public int getRepositoriesCount() {
return repositories_.size();
}
/**
*
*
* <pre>
* The repositories returned.
* </pre>
*
* <code>repeated .google.devtools.artifactregistry.v1.Repository repositories = 1;</code>
*/
@java.lang.Override
public com.google.devtools.artifactregistry.v1.Repository getRepositories(int index) {
return repositories_.get(index);
}
/**
*
*
* <pre>
* The repositories returned.
* </pre>
*
* <code>repeated .google.devtools.artifactregistry.v1.Repository repositories = 1;</code>
*/
@java.lang.Override
public com.google.devtools.artifactregistry.v1.RepositoryOrBuilder getRepositoriesOrBuilder(
int index) {
return repositories_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* The token to retrieve the next page of repositories, or empty if there are
* no more repositories to return.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
@java.lang.Override
public java.lang.String getNextPageToken() {
  // The field holds either a String or a ByteString. On first String access,
  // decode the UTF-8 bytes once and cache the decoded String back into the
  // field so later calls return it directly.
  java.lang.Object ref = nextPageToken_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    nextPageToken_ = s;
    return s;
  }
}
/**
*
*
* <pre>
* The token to retrieve the next page of repositories, or empty if there are
* no more repositories to return.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
  // Mirror of getNextPageToken(): if the field currently holds a String,
  // encode it to a ByteString once and cache the bytes back into the field.
  java.lang.Object ref = nextPageToken_;
  if (ref instanceof java.lang.String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    nextPageToken_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
// Memoized result of isInitialized(): -1 = not yet computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;

@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  // This message declares no required fields, so it is always initialized.
  memoizedIsInitialized = 1;
  return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  // Field 1: repeated Repository messages, one tagged entry per element.
  for (int i = 0; i < repositories_.size(); i++) {
    output.writeMessage(1, repositories_.get(i));
  }
  // Field 2: next_page_token — proto3 skips fields at their default (empty).
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
  }
  // Preserve any fields parsed from a newer schema version.
  getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
  // Size is memoized; -1 marks "not yet computed".
  int size = memoizedSize;
  if (size != -1) return size;

  size = 0;
  // Must mirror writeTo(): repeated field 1, then optional field 2,
  // then unknown fields.
  for (int i = 0; i < repositories_.size(); i++) {
    size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, repositories_.get(i));
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  // Different message type: defer to the superclass definition.
  if (!(obj instanceof com.google.devtools.artifactregistry.v1.ListRepositoriesResponse)) {
    return super.equals(obj);
  }
  com.google.devtools.artifactregistry.v1.ListRepositoriesResponse other =
      (com.google.devtools.artifactregistry.v1.ListRepositoriesResponse) obj;

  // Field-by-field comparison, including unknown fields, so that two messages
  // are equal only when they serialize identically.
  if (!getRepositoriesList().equals(other.getRepositoriesList())) return false;
  if (!getNextPageToken().equals(other.getNextPageToken())) return false;
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}
@java.lang.Override
public int hashCode() {
  // Hash is memoized; 0 means "not yet computed" (a computed hash of 0 would
  // simply be recomputed each call).
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  // Repeated field contributes only when non-empty, keeping the hash stable
  // between an absent list and an empty one.
  if (getRepositoriesCount() > 0) {
    hash = (37 * hash) + REPOSITORIES_FIELD_NUMBER;
    hash = (53 * hash) + getRepositoriesList().hashCode();
  }
  hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
  hash = (53 * hash) + getNextPageToken().hashCode();
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
public static com.google.devtools.artifactregistry.v1.ListRepositoriesResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.devtools.artifactregistry.v1.ListRepositoriesResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.devtools.artifactregistry.v1.ListRepositoriesResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.devtools.artifactregistry.v1.ListRepositoriesResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.devtools.artifactregistry.v1.ListRepositoriesResponse parseFrom(
byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.devtools.artifactregistry.v1.ListRepositoriesResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.devtools.artifactregistry.v1.ListRepositoriesResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.devtools.artifactregistry.v1.ListRepositoriesResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.devtools.artifactregistry.v1.ListRepositoriesResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.devtools.artifactregistry.v1.ListRepositoriesResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.devtools.artifactregistry.v1.ListRepositoriesResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.devtools.artifactregistry.v1.ListRepositoriesResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.devtools.artifactregistry.v1.ListRepositoriesResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* The response from listing repositories.
* </pre>
*
* Protobuf type {@code google.devtools.artifactregistry.v1.ListRepositoriesResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.devtools.artifactregistry.v1.ListRepositoriesResponse)
com.google.devtools.artifactregistry.v1.ListRepositoriesResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.devtools.artifactregistry.v1.RepositoryProto
.internal_static_google_devtools_artifactregistry_v1_ListRepositoriesResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.devtools.artifactregistry.v1.RepositoryProto
.internal_static_google_devtools_artifactregistry_v1_ListRepositoriesResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.devtools.artifactregistry.v1.ListRepositoriesResponse.class,
com.google.devtools.artifactregistry.v1.ListRepositoriesResponse.Builder.class);
}
// Construct using com.google.devtools.artifactregistry.v1.ListRepositoriesResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (repositoriesBuilder_ == null) {
repositories_ = java.util.Collections.emptyList();
} else {
repositories_ = null;
repositoriesBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.devtools.artifactregistry.v1.RepositoryProto
.internal_static_google_devtools_artifactregistry_v1_ListRepositoriesResponse_descriptor;
}
@java.lang.Override
public com.google.devtools.artifactregistry.v1.ListRepositoriesResponse
getDefaultInstanceForType() {
return com.google.devtools.artifactregistry.v1.ListRepositoriesResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.devtools.artifactregistry.v1.ListRepositoriesResponse build() {
com.google.devtools.artifactregistry.v1.ListRepositoriesResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.devtools.artifactregistry.v1.ListRepositoriesResponse buildPartial() {
com.google.devtools.artifactregistry.v1.ListRepositoriesResponse result =
new com.google.devtools.artifactregistry.v1.ListRepositoriesResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.devtools.artifactregistry.v1.ListRepositoriesResponse result) {
if (repositoriesBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
repositories_ = java.util.Collections.unmodifiableList(repositories_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.repositories_ = repositories_;
} else {
result.repositories_ = repositoriesBuilder_.build();
}
}
private void buildPartial0(
com.google.devtools.artifactregistry.v1.ListRepositoriesResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.devtools.artifactregistry.v1.ListRepositoriesResponse) {
return mergeFrom((com.google.devtools.artifactregistry.v1.ListRepositoriesResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(
com.google.devtools.artifactregistry.v1.ListRepositoriesResponse other) {
if (other
== com.google.devtools.artifactregistry.v1.ListRepositoriesResponse.getDefaultInstance())
return this;
if (repositoriesBuilder_ == null) {
if (!other.repositories_.isEmpty()) {
if (repositories_.isEmpty()) {
repositories_ = other.repositories_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureRepositoriesIsMutable();
repositories_.addAll(other.repositories_);
}
onChanged();
}
} else {
if (!other.repositories_.isEmpty()) {
if (repositoriesBuilder_.isEmpty()) {
repositoriesBuilder_.dispose();
repositoriesBuilder_ = null;
repositories_ = other.repositories_;
bitField0_ = (bitField0_ & ~0x00000001);
repositoriesBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getRepositoriesFieldBuilder()
: null;
} else {
repositoriesBuilder_.addAllMessages(other.repositories_);
}
}
}
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.devtools.artifactregistry.v1.Repository m =
input.readMessage(
com.google.devtools.artifactregistry.v1.Repository.parser(),
extensionRegistry);
if (repositoriesBuilder_ == null) {
ensureRepositoriesIsMutable();
repositories_.add(m);
} else {
repositoriesBuilder_.addMessage(m);
}
break;
} // case 10
case 18:
{
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.devtools.artifactregistry.v1.Repository> repositories_ =
java.util.Collections.emptyList();
private void ensureRepositoriesIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
repositories_ =
new java.util.ArrayList<com.google.devtools.artifactregistry.v1.Repository>(
repositories_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.devtools.artifactregistry.v1.Repository,
com.google.devtools.artifactregistry.v1.Repository.Builder,
com.google.devtools.artifactregistry.v1.RepositoryOrBuilder>
repositoriesBuilder_;
/**
*
*
* <pre>
* The repositories returned.
* </pre>
*
* <code>repeated .google.devtools.artifactregistry.v1.Repository repositories = 1;</code>
*/
    // Accessors for the repeated `repositories` field (field number 1). Until
    // getRepositoriesFieldBuilder() is first invoked, elements live directly in
    // repositories_; after that the RepeatedFieldBuilderV3 owns the data and every
    // accessor below delegates to it. (Generated code — do not hand-edit logic.)
    public java.util.List<com.google.devtools.artifactregistry.v1.Repository>
        getRepositoriesList() {
      if (repositoriesBuilder_ == null) {
        return java.util.Collections.unmodifiableList(repositories_);
      } else {
        return repositoriesBuilder_.getMessageList();
      }
    }
    /**
     *
     *
     * <pre>
     * The repositories returned.
     * </pre>
     *
     * <code>repeated .google.devtools.artifactregistry.v1.Repository repositories = 1;</code>
     */
    public int getRepositoriesCount() {
      if (repositoriesBuilder_ == null) {
        return repositories_.size();
      } else {
        return repositoriesBuilder_.getCount();
      }
    }
    /**
     *
     *
     * <pre>
     * The repositories returned.
     * </pre>
     *
     * <code>repeated .google.devtools.artifactregistry.v1.Repository repositories = 1;</code>
     */
    public com.google.devtools.artifactregistry.v1.Repository getRepositories(int index) {
      if (repositoriesBuilder_ == null) {
        return repositories_.get(index);
      } else {
        return repositoriesBuilder_.getMessage(index);
      }
    }
    /**
     *
     *
     * <pre>
     * The repositories returned.
     * </pre>
     *
     * <code>repeated .google.devtools.artifactregistry.v1.Repository repositories = 1;</code>
     */
    public Builder setRepositories(
        int index, com.google.devtools.artifactregistry.v1.Repository value) {
      if (repositoriesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureRepositoriesIsMutable();
        repositories_.set(index, value);
        onChanged();
      } else {
        repositoriesBuilder_.setMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The repositories returned.
     * </pre>
     *
     * <code>repeated .google.devtools.artifactregistry.v1.Repository repositories = 1;</code>
     */
    public Builder setRepositories(
        int index, com.google.devtools.artifactregistry.v1.Repository.Builder builderForValue) {
      if (repositoriesBuilder_ == null) {
        ensureRepositoriesIsMutable();
        repositories_.set(index, builderForValue.build());
        onChanged();
      } else {
        repositoriesBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The repositories returned.
     * </pre>
     *
     * <code>repeated .google.devtools.artifactregistry.v1.Repository repositories = 1;</code>
     */
    public Builder addRepositories(com.google.devtools.artifactregistry.v1.Repository value) {
      if (repositoriesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureRepositoriesIsMutable();
        repositories_.add(value);
        onChanged();
      } else {
        repositoriesBuilder_.addMessage(value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The repositories returned.
     * </pre>
     *
     * <code>repeated .google.devtools.artifactregistry.v1.Repository repositories = 1;</code>
     */
    public Builder addRepositories(
        int index, com.google.devtools.artifactregistry.v1.Repository value) {
      if (repositoriesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureRepositoriesIsMutable();
        repositories_.add(index, value);
        onChanged();
      } else {
        repositoriesBuilder_.addMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The repositories returned.
     * </pre>
     *
     * <code>repeated .google.devtools.artifactregistry.v1.Repository repositories = 1;</code>
     */
    public Builder addRepositories(
        com.google.devtools.artifactregistry.v1.Repository.Builder builderForValue) {
      if (repositoriesBuilder_ == null) {
        ensureRepositoriesIsMutable();
        repositories_.add(builderForValue.build());
        onChanged();
      } else {
        repositoriesBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The repositories returned.
     * </pre>
     *
     * <code>repeated .google.devtools.artifactregistry.v1.Repository repositories = 1;</code>
     */
    public Builder addRepositories(
        int index, com.google.devtools.artifactregistry.v1.Repository.Builder builderForValue) {
      if (repositoriesBuilder_ == null) {
        ensureRepositoriesIsMutable();
        repositories_.add(index, builderForValue.build());
        onChanged();
      } else {
        repositoriesBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The repositories returned.
     * </pre>
     *
     * <code>repeated .google.devtools.artifactregistry.v1.Repository repositories = 1;</code>
     */
    public Builder addAllRepositories(
        java.lang.Iterable<? extends com.google.devtools.artifactregistry.v1.Repository> values) {
      if (repositoriesBuilder_ == null) {
        ensureRepositoriesIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, repositories_);
        onChanged();
      } else {
        repositoriesBuilder_.addAllMessages(values);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The repositories returned.
     * </pre>
     *
     * <code>repeated .google.devtools.artifactregistry.v1.Repository repositories = 1;</code>
     */
    public Builder clearRepositories() {
      if (repositoriesBuilder_ == null) {
        repositories_ = java.util.Collections.emptyList();
        // ~0x00000001 resets this field's bit in bitField0_.
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        repositoriesBuilder_.clear();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The repositories returned.
     * </pre>
     *
     * <code>repeated .google.devtools.artifactregistry.v1.Repository repositories = 1;</code>
     */
    public Builder removeRepositories(int index) {
      if (repositoriesBuilder_ == null) {
        ensureRepositoriesIsMutable();
        repositories_.remove(index);
        onChanged();
      } else {
        repositoriesBuilder_.remove(index);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The repositories returned.
     * </pre>
     *
     * <code>repeated .google.devtools.artifactregistry.v1.Repository repositories = 1;</code>
     */
    public com.google.devtools.artifactregistry.v1.Repository.Builder getRepositoriesBuilder(
        int index) {
      return getRepositoriesFieldBuilder().getBuilder(index);
    }
    /**
     *
     *
     * <pre>
     * The repositories returned.
     * </pre>
     *
     * <code>repeated .google.devtools.artifactregistry.v1.Repository repositories = 1;</code>
     */
    public com.google.devtools.artifactregistry.v1.RepositoryOrBuilder getRepositoriesOrBuilder(
        int index) {
      if (repositoriesBuilder_ == null) {
        return repositories_.get(index);
      } else {
        return repositoriesBuilder_.getMessageOrBuilder(index);
      }
    }
    /**
     *
     *
     * <pre>
     * The repositories returned.
     * </pre>
     *
     * <code>repeated .google.devtools.artifactregistry.v1.Repository repositories = 1;</code>
     */
    public java.util.List<? extends com.google.devtools.artifactregistry.v1.RepositoryOrBuilder>
        getRepositoriesOrBuilderList() {
      if (repositoriesBuilder_ != null) {
        return repositoriesBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(repositories_);
      }
    }
    /**
     *
     *
     * <pre>
     * The repositories returned.
     * </pre>
     *
     * <code>repeated .google.devtools.artifactregistry.v1.Repository repositories = 1;</code>
     */
    public com.google.devtools.artifactregistry.v1.Repository.Builder addRepositoriesBuilder() {
      return getRepositoriesFieldBuilder()
          .addBuilder(com.google.devtools.artifactregistry.v1.Repository.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * The repositories returned.
     * </pre>
     *
     * <code>repeated .google.devtools.artifactregistry.v1.Repository repositories = 1;</code>
     */
    public com.google.devtools.artifactregistry.v1.Repository.Builder addRepositoriesBuilder(
        int index) {
      return getRepositoriesFieldBuilder()
          .addBuilder(
              index, com.google.devtools.artifactregistry.v1.Repository.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * The repositories returned.
     * </pre>
     *
     * <code>repeated .google.devtools.artifactregistry.v1.Repository repositories = 1;</code>
     */
    public java.util.List<com.google.devtools.artifactregistry.v1.Repository.Builder>
        getRepositoriesBuilderList() {
      return getRepositoriesFieldBuilder().getBuilderList();
    }
    // Lazily creates the field builder on first use, seeding it with the current list;
    // repositories_ is nulled afterwards so the builder is the single source of truth.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.devtools.artifactregistry.v1.Repository,
            com.google.devtools.artifactregistry.v1.Repository.Builder,
            com.google.devtools.artifactregistry.v1.RepositoryOrBuilder>
        getRepositoriesFieldBuilder() {
      if (repositoriesBuilder_ == null) {
        repositoriesBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.devtools.artifactregistry.v1.Repository,
                com.google.devtools.artifactregistry.v1.Repository.Builder,
                com.google.devtools.artifactregistry.v1.RepositoryOrBuilder>(
                repositories_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
        repositories_ = null;
      }
      return repositoriesBuilder_;
    }
    // `next_page_token` is stored as either a String or a ByteString; the getters
    // convert lazily and cache the converted form back into the field.
    private java.lang.Object nextPageToken_ = "";
    /**
     *
     *
     * <pre>
     * The token to retrieve the next page of repositories, or empty if there are
     * no more repositories to return.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The nextPageToken.
     */
    public java.lang.String getNextPageToken() {
      java.lang.Object ref = nextPageToken_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        nextPageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * The token to retrieve the next page of repositories, or empty if there are
     * no more repositories to return.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The bytes for nextPageToken.
     */
    public com.google.protobuf.ByteString getNextPageTokenBytes() {
      java.lang.Object ref = nextPageToken_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        nextPageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * The token to retrieve the next page of repositories, or empty if there are
     * no more repositories to return.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The token to retrieve the next page of repositories, or empty if there are
     * no more repositories to return.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearNextPageToken() {
      nextPageToken_ = getDefaultInstance().getNextPageToken();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The token to retrieve the next page of repositories, or empty if there are
     * no more repositories to return.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The bytes for nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      // Rejects invalid UTF-8 before storing the raw bytes.
      checkByteStringIsUtf8(value);
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    // Final overrides delegating unknown-field handling to the generated superclass.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.devtools.artifactregistry.v1.ListRepositoriesResponse)
}
// @@protoc_insertion_point(class_scope:google.devtools.artifactregistry.v1.ListRepositoriesResponse)
private static final com.google.devtools.artifactregistry.v1.ListRepositoriesResponse
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.devtools.artifactregistry.v1.ListRepositoriesResponse();
}
public static com.google.devtools.artifactregistry.v1.ListRepositoriesResponse
getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<ListRepositoriesResponse> PARSER =
new com.google.protobuf.AbstractParser<ListRepositoriesResponse>() {
@java.lang.Override
public ListRepositoriesResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ListRepositoriesResponse> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListRepositoriesResponse> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.devtools.artifactregistry.v1.ListRepositoriesResponse
getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/recommender/v1beta1/recommender_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.recommender.v1beta1;
/**
*
*
* <pre>
* Response for the `ListInsightTypes` method.
* </pre>
*
* Protobuf type {@code google.cloud.recommender.v1beta1.ListInsightTypesResponse}
*/
public final class ListInsightTypesResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.recommender.v1beta1.ListInsightTypesResponse)
ListInsightTypesResponseOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use ListInsightTypesResponse.newBuilder() to construct.
  private ListInsightTypesResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // Default-instance constructor: empty list, empty token.
  private ListInsightTypesResponse() {
    insightTypes_ = java.util.Collections.emptyList();
    nextPageToken_ = "";
  }
  // Factory hook invoked by the protobuf runtime (the parameter is unused by design).
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ListInsightTypesResponse();
  }
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.recommender.v1beta1.RecommenderProto
        .internal_static_google_cloud_recommender_v1beta1_ListInsightTypesResponse_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.recommender.v1beta1.RecommenderProto
        .internal_static_google_cloud_recommender_v1beta1_ListInsightTypesResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.recommender.v1beta1.ListInsightTypesResponse.class,
            com.google.cloud.recommender.v1beta1.ListInsightTypesResponse.Builder.class);
  }
  public static final int INSIGHT_TYPES_FIELD_NUMBER = 1;
  // Immutable at message level; mutation happens only through the Builder.
  @SuppressWarnings("serial")
  private java.util.List<com.google.cloud.recommender.v1beta1.InsightType> insightTypes_;
  /**
   *
   *
   * <pre>
   * The set of recommenders available
   * </pre>
   *
   * <code>repeated .google.cloud.recommender.v1beta1.InsightType insight_types = 1;</code>
   */
  @java.lang.Override
  public java.util.List<com.google.cloud.recommender.v1beta1.InsightType> getInsightTypesList() {
    return insightTypes_;
  }
  /**
   *
   *
   * <pre>
   * The set of recommenders available
   * </pre>
   *
   * <code>repeated .google.cloud.recommender.v1beta1.InsightType insight_types = 1;</code>
   */
  @java.lang.Override
  public java.util.List<? extends com.google.cloud.recommender.v1beta1.InsightTypeOrBuilder>
      getInsightTypesOrBuilderList() {
    return insightTypes_;
  }
  /**
   *
   *
   * <pre>
   * The set of recommenders available
   * </pre>
   *
   * <code>repeated .google.cloud.recommender.v1beta1.InsightType insight_types = 1;</code>
   */
  @java.lang.Override
  public int getInsightTypesCount() {
    return insightTypes_.size();
  }
  /**
   *
   *
   * <pre>
   * The set of recommenders available
   * </pre>
   *
   * <code>repeated .google.cloud.recommender.v1beta1.InsightType insight_types = 1;</code>
   */
  @java.lang.Override
  public com.google.cloud.recommender.v1beta1.InsightType getInsightTypes(int index) {
    return insightTypes_.get(index);
  }
  /**
   *
   *
   * <pre>
   * The set of recommenders available
   * </pre>
   *
   * <code>repeated .google.cloud.recommender.v1beta1.InsightType insight_types = 1;</code>
   */
  @java.lang.Override
  public com.google.cloud.recommender.v1beta1.InsightTypeOrBuilder getInsightTypesOrBuilder(
      int index) {
    return insightTypes_.get(index);
  }
  public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
  // Stored as String or ByteString; getters convert lazily and cache the result.
  @SuppressWarnings("serial")
  private volatile java.lang.Object nextPageToken_ = "";
  /**
   *
   *
   * <pre>
   * A token, which can be sent as `page_token` to retrieve the next page.
   * If this field is omitted, there are no subsequent pages.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The nextPageToken.
   */
  @java.lang.Override
  public java.lang.String getNextPageToken() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      nextPageToken_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * A token, which can be sent as `page_token` to retrieve the next page.
   * If this field is omitted, there are no subsequent pages.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The bytes for nextPageToken.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getNextPageTokenBytes() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      nextPageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // -1 = not yet computed, 0 = not initialized, 1 = initialized.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes fields in tag order, then any unknown fields preserved at parse time.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    for (int i = 0; i < insightTypes_.size(); i++) {
      output.writeMessage(1, insightTypes_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
    }
    getUnknownFields().writeTo(output);
  }
  // Computes the wire size once and memoizes it in memoizedSize.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    for (int i = 0; i < insightTypes_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, insightTypes_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.recommender.v1beta1.ListInsightTypesResponse)) {
      return super.equals(obj);
    }
    com.google.cloud.recommender.v1beta1.ListInsightTypesResponse other =
        (com.google.cloud.recommender.v1beta1.ListInsightTypesResponse) obj;
    if (!getInsightTypesList().equals(other.getInsightTypesList())) return false;
    if (!getNextPageToken().equals(other.getNextPageToken())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Hash folds in field numbers and values; memoized because messages are immutable.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getInsightTypesCount() > 0) {
      hash = (37 * hash) + INSIGHT_TYPES_FIELD_NUMBER;
      hash = (53 * hash) + getInsightTypesList().hashCode();
    }
    hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getNextPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points; all delegate to PARSER or the
  // GeneratedMessageV3 I/O helpers.
  public static com.google.cloud.recommender.v1beta1.ListInsightTypesResponse parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.recommender.v1beta1.ListInsightTypesResponse parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.recommender.v1beta1.ListInsightTypesResponse parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.recommender.v1beta1.ListInsightTypesResponse parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.recommender.v1beta1.ListInsightTypesResponse parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.recommender.v1beta1.ListInsightTypesResponse parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.recommender.v1beta1.ListInsightTypesResponse parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.recommender.v1beta1.ListInsightTypesResponse parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.recommender.v1beta1.ListInsightTypesResponse parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.recommender.v1beta1.ListInsightTypesResponse parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.recommender.v1beta1.ListInsightTypesResponse parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.recommender.v1beta1.ListInsightTypesResponse parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Builder factories: toBuilder() on the default instance avoids a needless merge.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(
      com.google.cloud.recommender.v1beta1.ListInsightTypesResponse prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Response for the `ListInsightTypes` method.
* </pre>
*
* Protobuf type {@code google.cloud.recommender.v1beta1.ListInsightTypesResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.recommender.v1beta1.ListInsightTypesResponse)
com.google.cloud.recommender.v1beta1.ListInsightTypesResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.recommender.v1beta1.RecommenderProto
.internal_static_google_cloud_recommender_v1beta1_ListInsightTypesResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.recommender.v1beta1.RecommenderProto
.internal_static_google_cloud_recommender_v1beta1_ListInsightTypesResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.recommender.v1beta1.ListInsightTypesResponse.class,
com.google.cloud.recommender.v1beta1.ListInsightTypesResponse.Builder.class);
}
// Construct using com.google.cloud.recommender.v1beta1.ListInsightTypesResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (insightTypesBuilder_ == null) {
insightTypes_ = java.util.Collections.emptyList();
} else {
insightTypes_ = null;
insightTypesBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.recommender.v1beta1.RecommenderProto
.internal_static_google_cloud_recommender_v1beta1_ListInsightTypesResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.recommender.v1beta1.ListInsightTypesResponse
getDefaultInstanceForType() {
return com.google.cloud.recommender.v1beta1.ListInsightTypesResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.recommender.v1beta1.ListInsightTypesResponse build() {
com.google.cloud.recommender.v1beta1.ListInsightTypesResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.recommender.v1beta1.ListInsightTypesResponse buildPartial() {
com.google.cloud.recommender.v1beta1.ListInsightTypesResponse result =
new com.google.cloud.recommender.v1beta1.ListInsightTypesResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.cloud.recommender.v1beta1.ListInsightTypesResponse result) {
if (insightTypesBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
insightTypes_ = java.util.Collections.unmodifiableList(insightTypes_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.insightTypes_ = insightTypes_;
} else {
result.insightTypes_ = insightTypesBuilder_.build();
}
}
private void buildPartial0(
com.google.cloud.recommender.v1beta1.ListInsightTypesResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.recommender.v1beta1.ListInsightTypesResponse) {
return mergeFrom((com.google.cloud.recommender.v1beta1.ListInsightTypesResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.recommender.v1beta1.ListInsightTypesResponse other) {
if (other
== com.google.cloud.recommender.v1beta1.ListInsightTypesResponse.getDefaultInstance())
return this;
if (insightTypesBuilder_ == null) {
if (!other.insightTypes_.isEmpty()) {
if (insightTypes_.isEmpty()) {
insightTypes_ = other.insightTypes_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureInsightTypesIsMutable();
insightTypes_.addAll(other.insightTypes_);
}
onChanged();
}
} else {
if (!other.insightTypes_.isEmpty()) {
if (insightTypesBuilder_.isEmpty()) {
insightTypesBuilder_.dispose();
insightTypesBuilder_ = null;
insightTypes_ = other.insightTypes_;
bitField0_ = (bitField0_ & ~0x00000001);
insightTypesBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getInsightTypesFieldBuilder()
: null;
} else {
insightTypesBuilder_.addAllMessages(other.insightTypes_);
}
}
}
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.cloud.recommender.v1beta1.InsightType m =
input.readMessage(
com.google.cloud.recommender.v1beta1.InsightType.parser(),
extensionRegistry);
if (insightTypesBuilder_ == null) {
ensureInsightTypesIsMutable();
insightTypes_.add(m);
} else {
insightTypesBuilder_.addMessage(m);
}
break;
} // case 10
case 18:
{
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.cloud.recommender.v1beta1.InsightType> insightTypes_ =
java.util.Collections.emptyList();
private void ensureInsightTypesIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
insightTypes_ =
new java.util.ArrayList<com.google.cloud.recommender.v1beta1.InsightType>(
insightTypes_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.recommender.v1beta1.InsightType,
com.google.cloud.recommender.v1beta1.InsightType.Builder,
com.google.cloud.recommender.v1beta1.InsightTypeOrBuilder>
insightTypesBuilder_;
/**
*
*
* <pre>
* The set of recommenders available
* </pre>
*
* <code>repeated .google.cloud.recommender.v1beta1.InsightType insight_types = 1;</code>
*/
    // ------------------------------------------------------------------
    // repeated .google.cloud.recommender.v1beta1.InsightType insight_types = 1
    //
    // NOTE: generated protobuf Builder accessors — do not hand-edit logic.
    // The field has two storage modes: before a field builder is requested,
    // elements live in the plain list `insightTypes_`; once
    // getInsightTypesFieldBuilder() is called, ownership moves to
    // `insightTypesBuilder_` and the list reference is nulled out. Every
    // accessor below branches on which mode is active.
    // ------------------------------------------------------------------
    public java.util.List<com.google.cloud.recommender.v1beta1.InsightType> getInsightTypesList() {
      if (insightTypesBuilder_ == null) {
        // List mode: hand out a read-only view so callers cannot mutate our backing list.
        return java.util.Collections.unmodifiableList(insightTypes_);
      } else {
        return insightTypesBuilder_.getMessageList();
      }
    }
    /**
     *
     *
     * <pre>
     * The set of recommenders available
     * </pre>
     *
     * <code>repeated .google.cloud.recommender.v1beta1.InsightType insight_types = 1;</code>
     */
    public int getInsightTypesCount() {
      if (insightTypesBuilder_ == null) {
        return insightTypes_.size();
      } else {
        return insightTypesBuilder_.getCount();
      }
    }
    /**
     *
     *
     * <pre>
     * The set of recommenders available
     * </pre>
     *
     * <code>repeated .google.cloud.recommender.v1beta1.InsightType insight_types = 1;</code>
     */
    public com.google.cloud.recommender.v1beta1.InsightType getInsightTypes(int index) {
      if (insightTypesBuilder_ == null) {
        return insightTypes_.get(index);
      } else {
        return insightTypesBuilder_.getMessage(index);
      }
    }
    /**
     *
     *
     * <pre>
     * The set of recommenders available
     * </pre>
     *
     * <code>repeated .google.cloud.recommender.v1beta1.InsightType insight_types = 1;</code>
     */
    public Builder setInsightTypes(
        int index, com.google.cloud.recommender.v1beta1.InsightType value) {
      if (insightTypesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureInsightTypesIsMutable();
        insightTypes_.set(index, value);
        onChanged();
      } else {
        insightTypesBuilder_.setMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The set of recommenders available
     * </pre>
     *
     * <code>repeated .google.cloud.recommender.v1beta1.InsightType insight_types = 1;</code>
     */
    public Builder setInsightTypes(
        int index, com.google.cloud.recommender.v1beta1.InsightType.Builder builderForValue) {
      if (insightTypesBuilder_ == null) {
        ensureInsightTypesIsMutable();
        insightTypes_.set(index, builderForValue.build());
        onChanged();
      } else {
        insightTypesBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The set of recommenders available
     * </pre>
     *
     * <code>repeated .google.cloud.recommender.v1beta1.InsightType insight_types = 1;</code>
     */
    public Builder addInsightTypes(com.google.cloud.recommender.v1beta1.InsightType value) {
      if (insightTypesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureInsightTypesIsMutable();
        insightTypes_.add(value);
        onChanged();
      } else {
        insightTypesBuilder_.addMessage(value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The set of recommenders available
     * </pre>
     *
     * <code>repeated .google.cloud.recommender.v1beta1.InsightType insight_types = 1;</code>
     */
    public Builder addInsightTypes(
        int index, com.google.cloud.recommender.v1beta1.InsightType value) {
      if (insightTypesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureInsightTypesIsMutable();
        insightTypes_.add(index, value);
        onChanged();
      } else {
        insightTypesBuilder_.addMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The set of recommenders available
     * </pre>
     *
     * <code>repeated .google.cloud.recommender.v1beta1.InsightType insight_types = 1;</code>
     */
    public Builder addInsightTypes(
        com.google.cloud.recommender.v1beta1.InsightType.Builder builderForValue) {
      if (insightTypesBuilder_ == null) {
        ensureInsightTypesIsMutable();
        insightTypes_.add(builderForValue.build());
        onChanged();
      } else {
        insightTypesBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The set of recommenders available
     * </pre>
     *
     * <code>repeated .google.cloud.recommender.v1beta1.InsightType insight_types = 1;</code>
     */
    public Builder addInsightTypes(
        int index, com.google.cloud.recommender.v1beta1.InsightType.Builder builderForValue) {
      if (insightTypesBuilder_ == null) {
        ensureInsightTypesIsMutable();
        insightTypes_.add(index, builderForValue.build());
        onChanged();
      } else {
        insightTypesBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The set of recommenders available
     * </pre>
     *
     * <code>repeated .google.cloud.recommender.v1beta1.InsightType insight_types = 1;</code>
     */
    public Builder addAllInsightTypes(
        java.lang.Iterable<? extends com.google.cloud.recommender.v1beta1.InsightType> values) {
      if (insightTypesBuilder_ == null) {
        ensureInsightTypesIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, insightTypes_);
        onChanged();
      } else {
        insightTypesBuilder_.addAllMessages(values);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The set of recommenders available
     * </pre>
     *
     * <code>repeated .google.cloud.recommender.v1beta1.InsightType insight_types = 1;</code>
     */
    public Builder clearInsightTypes() {
      if (insightTypesBuilder_ == null) {
        insightTypes_ = java.util.Collections.emptyList();
        // Clear the has-bit for this field (bit 0 of bitField0_).
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        insightTypesBuilder_.clear();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The set of recommenders available
     * </pre>
     *
     * <code>repeated .google.cloud.recommender.v1beta1.InsightType insight_types = 1;</code>
     */
    public Builder removeInsightTypes(int index) {
      if (insightTypesBuilder_ == null) {
        ensureInsightTypesIsMutable();
        insightTypes_.remove(index);
        onChanged();
      } else {
        insightTypesBuilder_.remove(index);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The set of recommenders available
     * </pre>
     *
     * <code>repeated .google.cloud.recommender.v1beta1.InsightType insight_types = 1;</code>
     */
    public com.google.cloud.recommender.v1beta1.InsightType.Builder getInsightTypesBuilder(
        int index) {
      // Forces a switch to builder mode (see getInsightTypesFieldBuilder()).
      return getInsightTypesFieldBuilder().getBuilder(index);
    }
    /**
     *
     *
     * <pre>
     * The set of recommenders available
     * </pre>
     *
     * <code>repeated .google.cloud.recommender.v1beta1.InsightType insight_types = 1;</code>
     */
    public com.google.cloud.recommender.v1beta1.InsightTypeOrBuilder getInsightTypesOrBuilder(
        int index) {
      if (insightTypesBuilder_ == null) {
        return insightTypes_.get(index);
      } else {
        return insightTypesBuilder_.getMessageOrBuilder(index);
      }
    }
    /**
     *
     *
     * <pre>
     * The set of recommenders available
     * </pre>
     *
     * <code>repeated .google.cloud.recommender.v1beta1.InsightType insight_types = 1;</code>
     */
    public java.util.List<? extends com.google.cloud.recommender.v1beta1.InsightTypeOrBuilder>
        getInsightTypesOrBuilderList() {
      if (insightTypesBuilder_ != null) {
        return insightTypesBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(insightTypes_);
      }
    }
    /**
     *
     *
     * <pre>
     * The set of recommenders available
     * </pre>
     *
     * <code>repeated .google.cloud.recommender.v1beta1.InsightType insight_types = 1;</code>
     */
    public com.google.cloud.recommender.v1beta1.InsightType.Builder addInsightTypesBuilder() {
      return getInsightTypesFieldBuilder()
          .addBuilder(com.google.cloud.recommender.v1beta1.InsightType.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * The set of recommenders available
     * </pre>
     *
     * <code>repeated .google.cloud.recommender.v1beta1.InsightType insight_types = 1;</code>
     */
    public com.google.cloud.recommender.v1beta1.InsightType.Builder addInsightTypesBuilder(
        int index) {
      return getInsightTypesFieldBuilder()
          .addBuilder(index, com.google.cloud.recommender.v1beta1.InsightType.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * The set of recommenders available
     * </pre>
     *
     * <code>repeated .google.cloud.recommender.v1beta1.InsightType insight_types = 1;</code>
     */
    public java.util.List<com.google.cloud.recommender.v1beta1.InsightType.Builder>
        getInsightTypesBuilderList() {
      return getInsightTypesFieldBuilder().getBuilderList();
    }
    // Lazily creates the RepeatedFieldBuilderV3 and hands it ownership of the
    // current list contents; after this call insightTypes_ is null and all
    // accessors go through insightTypesBuilder_.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.recommender.v1beta1.InsightType,
            com.google.cloud.recommender.v1beta1.InsightType.Builder,
            com.google.cloud.recommender.v1beta1.InsightTypeOrBuilder>
        getInsightTypesFieldBuilder() {
      if (insightTypesBuilder_ == null) {
        insightTypesBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.cloud.recommender.v1beta1.InsightType,
                com.google.cloud.recommender.v1beta1.InsightType.Builder,
                com.google.cloud.recommender.v1beta1.InsightTypeOrBuilder>(
                insightTypes_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
        insightTypes_ = null;
      }
      return insightTypesBuilder_;
    }
    // ------------------------------------------------------------------
    // string next_page_token = 2
    //
    // NOTE: generated protobuf Builder accessors — do not hand-edit logic.
    // The field is stored as Object so it can hold either a String or a
    // ByteString; each getter lazily converts and caches the requested form.
    // ------------------------------------------------------------------
    private java.lang.Object nextPageToken_ = "";
    /**
     *
     *
     * <pre>
     * A token, which can be sent as `page_token` to retrieve the next page.
     * If this field is omitted, there are no subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The nextPageToken.
     */
    public java.lang.String getNextPageToken() {
      java.lang.Object ref = nextPageToken_;
      if (!(ref instanceof java.lang.String)) {
        // Stored as ByteString: decode once and cache the String form.
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        nextPageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * A token, which can be sent as `page_token` to retrieve the next page.
     * If this field is omitted, there are no subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The bytes for nextPageToken.
     */
    public com.google.protobuf.ByteString getNextPageTokenBytes() {
      java.lang.Object ref = nextPageToken_;
      if (ref instanceof String) {
        // Stored as String: encode once and cache the ByteString form.
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        nextPageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * A token, which can be sent as `page_token` to retrieve the next page.
     * If this field is omitted, there are no subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      nextPageToken_ = value;
      // Set the has-bit for this field (bit 1 of bitField0_).
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * A token, which can be sent as `page_token` to retrieve the next page.
     * If this field is omitted, there are no subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearNextPageToken() {
      nextPageToken_ = getDefaultInstance().getNextPageToken();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * A token, which can be sent as `page_token` to retrieve the next page.
     * If this field is omitted, there are no subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The bytes for nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      // Proto3 strings must be valid UTF-8; reject invalid byte sequences.
      checkByteStringIsUtf8(value);
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    // Generated pass-through overrides: unknown-field handling is delegated
    // unchanged to GeneratedMessageV3.Builder.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.recommender.v1beta1.ListInsightTypesResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.recommender.v1beta1.ListInsightTypesResponse)
  // Singleton default instance shared by all callers of getDefaultInstance();
  // created eagerly at class-load time (generated code — do not hand-edit).
  private static final com.google.cloud.recommender.v1beta1.ListInsightTypesResponse
      DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.recommender.v1beta1.ListInsightTypesResponse();
  }
  public static com.google.cloud.recommender.v1beta1.ListInsightTypesResponse getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Generated wire parser. On any parse failure it attaches the partially
  // built message to the thrown InvalidProtocolBufferException so callers can
  // inspect what was read before the error.
  private static final com.google.protobuf.Parser<ListInsightTypesResponse> PARSER =
      new com.google.protobuf.AbstractParser<ListInsightTypesResponse>() {
        @java.lang.Override
        public ListInsightTypesResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O errors in the protobuf exception type expected by callers.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  // Static and instance accessors for the shared PARSER / DEFAULT_INSTANCE
  // singletons (generated code — do not hand-edit).
  public static com.google.protobuf.Parser<ListInsightTypesResponse> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<ListInsightTypesResponse> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.recommender.v1beta1.ListInsightTypesResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.gravitino.authorization.ranger;
import static org.apache.gravitino.authorization.ranger.RangerHadoopSQLMetadataObject.Type.COLUMN;
import static org.apache.gravitino.authorization.ranger.RangerHadoopSQLMetadataObject.Type.SCHEMA;
import static org.apache.gravitino.authorization.ranger.RangerHadoopSQLMetadataObject.Type.TABLE;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.gravitino.MetadataObject;
import org.apache.gravitino.authorization.AuthorizationMetadataObject;
import org.apache.gravitino.authorization.AuthorizationPrivilege;
import org.apache.gravitino.authorization.AuthorizationSecurableObject;
import org.apache.gravitino.authorization.MetadataObjectChange;
import org.apache.gravitino.authorization.Privilege;
import org.apache.gravitino.authorization.SecurableObject;
import org.apache.gravitino.authorization.SecurableObjects;
import org.apache.gravitino.authorization.common.ErrorMessages;
import org.apache.gravitino.authorization.common.RangerAuthorizationProperties;
import org.apache.gravitino.authorization.ranger.RangerPrivileges.RangerHadoopSQLPrivilege;
import org.apache.gravitino.authorization.ranger.reference.RangerDefines.PolicyResource;
import org.apache.gravitino.exceptions.AuthorizationPluginException;
import org.apache.ranger.RangerServiceException;
import org.apache.ranger.plugin.model.RangerPolicy;
import org.apache.ranger.plugin.util.SearchFilter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class RangerAuthorizationHadoopSQLPlugin extends RangerAuthorizationPlugin {
  // Class-scoped SLF4J logger for policy-update warnings/errors.
  private static final Logger LOG =
      LoggerFactory.getLogger(RangerAuthorizationHadoopSQLPlugin.class);

  /**
   * Creates a Hadoop SQL (Hive-style) Ranger authorization plugin.
   *
   * @param metalake the Gravitino metalake this plugin serves
   * @param config Ranger authorization properties (service name, endpoint, credentials, ...)
   */
  public RangerAuthorizationHadoopSQLPlugin(String metalake, Map<String, String> config) {
    super(metalake, config);
  }
  /**
   * Default mapping from each supported Gravitino privilege name to the set of Ranger Hadoop SQL
   * privileges that implement it. Privileges absent from this map (e.g. topic/fileset privileges)
   * are not supported by this plugin.
   */
  @Override
  public Map<Privilege.Name, Set<AuthorizationPrivilege>> privilegesMappingRule() {
    return ImmutableMap.of(
        Privilege.Name.CREATE_CATALOG,
        ImmutableSet.of(RangerHadoopSQLPrivilege.CREATE),
        Privilege.Name.USE_CATALOG,
        ImmutableSet.of(RangerHadoopSQLPrivilege.SELECT),
        Privilege.Name.CREATE_SCHEMA,
        ImmutableSet.of(RangerHadoopSQLPrivilege.CREATE),
        Privilege.Name.USE_SCHEMA,
        ImmutableSet.of(RangerHadoopSQLPrivilege.SELECT),
        Privilege.Name.CREATE_TABLE,
        ImmutableSet.of(RangerHadoopSQLPrivilege.CREATE),
        // MODIFY_TABLE fans out to every write-capable Ranger privilege plus read access.
        Privilege.Name.MODIFY_TABLE,
        ImmutableSet.of(
            RangerHadoopSQLPrivilege.READ,
            RangerHadoopSQLPrivilege.SELECT,
            RangerHadoopSQLPrivilege.UPDATE,
            RangerHadoopSQLPrivilege.ALTER,
            RangerHadoopSQLPrivilege.WRITE),
        Privilege.Name.SELECT_TABLE,
        ImmutableSet.of(RangerHadoopSQLPrivilege.READ, RangerHadoopSQLPrivilege.SELECT));
  }
/**
* Find the managed policy for the ranger securable object.
*
* @param authzMetadataObject The ranger securable object to find the managed policy.
* @return The managed policy for the metadata object.
*/
@Override
public RangerPolicy findManagedPolicy(AuthorizationMetadataObject authzMetadataObject)
throws AuthorizationPluginException {
List<String> nsMetadataObj = authzMetadataObject.names();
Map<String, String> preciseFilters = new HashMap<>();
for (int i = 0; i < nsMetadataObj.size() && i < policyResourceDefinesRule().size(); i++) {
preciseFilters.put(policyResourceDefinesRule().get(i), nsMetadataObj.get(i));
}
return preciseFindPolicy(authzMetadataObject, preciseFilters);
}
/** Wildcard search the Ranger policies in the different Ranger service. */
@Override
protected List<RangerPolicy> wildcardSearchPolicies(
AuthorizationMetadataObject authzMetadataObject) {
List<String> resourceDefines = policyResourceDefinesRule();
Map<String, String> searchFilters = new HashMap<>();
searchFilters.put(SearchFilter.SERVICE_NAME, rangerServiceName);
for (int i = 0; i < authzMetadataObject.names().size() && i < resourceDefines.size(); i++) {
searchFilters.put(
SearchFilter.RESOURCE_PREFIX + resourceDefines.get(i),
authzMetadataObject.names().get(i));
}
try {
return rangerClient.findPolicies(searchFilters);
} catch (RangerServiceException e) {
throw new AuthorizationPluginException(e, "Failed to find policies in Ranger");
}
}
  /**
   * Renames the Ranger policies affected by a metadata-object rename.
   *
   * <p>Rename cascades downward through the resource hierarchy:
   *
   * <ul>
   *   <li>SCHEMA rename touches {@code {schema}}, {@code {schema}.*} and {@code {schema}.*.*}
   *   <li>TABLE rename touches {@code {schema}.{table}} and {@code {schema}.{table}.*}
   *   <li>COLUMN rename touches only {@code {schema}.{table}.{column}}
   * </ul>
   *
   * Levels above the renamed object keep their old name; levels below are the wildcard
   * {@code *} resource, which maps to itself.
   */
  @Override
  protected void renameMetadataObject(
      AuthorizationMetadataObject authzMetadataObject,
      AuthorizationMetadataObject newAuthzMetadataObject) {
    // One (oldName, newName) pair per hierarchy level: index 0 = schema, 1 = table, 2 = column.
    List<Pair<String, String>> mappingOldAndNewMetadata;
    if (newAuthzMetadataObject.type().equals(SCHEMA)) {
      // Rename the SCHEMA, Need to rename these the relevant policies, `{schema}`, `{schema}.*`,
      // * `{schema}.*.*`
      mappingOldAndNewMetadata =
          ImmutableList.of(
              Pair.of(authzMetadataObject.names().get(0), newAuthzMetadataObject.names().get(0)),
              Pair.of(RangerHelper.RESOURCE_ALL, RangerHelper.RESOURCE_ALL),
              Pair.of(RangerHelper.RESOURCE_ALL, RangerHelper.RESOURCE_ALL));
    } else if (newAuthzMetadataObject.type().equals(TABLE)) {
      // Rename the TABLE, Need to rename these the relevant policies, `{schema}.*`, `{schema}.*.*`
      mappingOldAndNewMetadata =
          ImmutableList.of(
              Pair.of(authzMetadataObject.names().get(0), newAuthzMetadataObject.names().get(0)),
              Pair.of(authzMetadataObject.names().get(1), newAuthzMetadataObject.names().get(1)),
              Pair.of(RangerHelper.RESOURCE_ALL, RangerHelper.RESOURCE_ALL));
    } else if (newAuthzMetadataObject.type().equals(COLUMN)) {
      // Rename the COLUMN, Only need to rename `{schema}.*.*`
      mappingOldAndNewMetadata =
          ImmutableList.of(
              Pair.of(authzMetadataObject.names().get(0), newAuthzMetadataObject.names().get(0)),
              Pair.of(authzMetadataObject.names().get(1), newAuthzMetadataObject.names().get(1)),
              Pair.of(authzMetadataObject.names().get(2), newAuthzMetadataObject.names().get(2)));
    } else {
      throw new IllegalArgumentException(
          "Unsupported metadata object type: " + authzMetadataObject.type());
    }
    // Walk the hierarchy top-down, accumulating name prefixes so that each pass updates the
    // policies at exactly one level (schema, then table, then column).
    List<String> oldMetadataNames = new ArrayList<>();
    List<String> newMetadataNames = new ArrayList<>();
    for (int index = 0; index < mappingOldAndNewMetadata.size(); index++) {
      oldMetadataNames.add(mappingOldAndNewMetadata.get(index).getKey());
      newMetadataNames.add(mappingOldAndNewMetadata.get(index).getValue());
      // The level being processed is determined by the prefix length, not the object's own type.
      AuthorizationMetadataObject.Type type;
      if (index == 0) {
        type = RangerHadoopSQLMetadataObject.Type.SCHEMA;
      } else if (index == 1) {
        type = RangerHadoopSQLMetadataObject.Type.TABLE;
      } else {
        type = RangerHadoopSQLMetadataObject.Type.COLUMN;
      }
      AuthorizationMetadataObject oldHadoopSQLMetadataObject =
          new RangerHadoopSQLMetadataObject(
              AuthorizationMetadataObject.getParentFullName(oldMetadataNames),
              AuthorizationMetadataObject.getLastName(oldMetadataNames),
              type);
      AuthorizationMetadataObject newHadoopSQLMetadataObject =
          new RangerHadoopSQLMetadataObject(
              AuthorizationMetadataObject.getParentFullName(newMetadataNames),
              AuthorizationMetadataObject.getLastName(newMetadataNames),
              type);
      updatePolicyByMetadataObject(
          type.metadataObjectType(), oldHadoopSQLMetadataObject, newHadoopSQLMetadataObject);
    }
  }
  /**
   * Rewrites every Ranger policy matching the old metadata object so that it refers to the new
   * one, at the hierarchy level identified by {@code operationType}.
   *
   * <p>For each matching policy this (1) renames the policy if its name follows Gravitino's
   * dotted-name convention, (2) swaps the resource value at the affected level, and (3) skips the
   * update when an equivalent policy already exists under the new name/resources.
   */
  @Override
  protected void updatePolicyByMetadataObject(
      MetadataObject.Type operationType,
      AuthorizationMetadataObject oldAuthzMetaObject,
      AuthorizationMetadataObject newAuthzMetaObject) {
    List<RangerPolicy> oldPolicies = wildcardSearchPolicies(oldAuthzMetaObject);
    List<RangerPolicy> existNewPolicies = wildcardSearchPolicies(newAuthzMetaObject);
    if (oldPolicies.isEmpty()) {
      LOG.warn("Cannot find the Ranger policy for the metadata object({})!", oldAuthzMetaObject);
      // Nothing to rename; best-effort semantics, not an error.
      return;
    }
    if (!existNewPolicies.isEmpty()) {
      LOG.warn("The Ranger policy for the metadata object({}) already exists!", newAuthzMetaObject);
    }
    // Maps the hierarchy level to its position in the dotted policy name / resource list.
    Map<MetadataObject.Type, Integer> operationTypeIndex =
        ImmutableMap.of(
            MetadataObject.Type.SCHEMA, 0,
            MetadataObject.Type.TABLE, 1,
            MetadataObject.Type.COLUMN, 2);
    oldPolicies.stream()
        .forEach(
            policy -> {
              try {
                String policyName = policy.getName();
                int index = operationTypeIndex.get(operationType);
                // Only rename policies whose name follows Gravitino's dotted-name spec.
                if (policy
                    .getName()
                    .equals(
                        AuthorizationSecurableObject.DOT_JOINER.join(oldAuthzMetaObject.names()))) {
                  List<String> policyNames =
                      Lists.newArrayList(
                          AuthorizationSecurableObject.DOT_SPLITTER.splitToList(policyName));
                  Preconditions.checkArgument(
                      policyNames.size() >= oldAuthzMetaObject.names().size(),
                      String.format(ErrorMessages.INVALID_POLICY_NAME, policyName));
                  if (policyNames.get(index).equals(RangerHelper.RESOURCE_ALL)) {
                    // The wildcard policy `*` is level-independent; no rename needed.
                    return;
                  }
                  policyNames.set(index, newAuthzMetaObject.names().get(index));
                  policy.setName(AuthorizationSecurableObject.DOT_JOINER.join(policyNames));
                }
                // Point the resource at the affected level to the new name.
                policy
                    .getResources()
                    .put(
                        policyResourceDefinesRule().get(index),
                        new RangerPolicy.RangerPolicyResource(
                            newAuthzMetaObject.names().get(index)));
                // Skip the update if an equivalent policy (same name or same resources)
                // already exists under the new metadata object.
                boolean alreadyExist =
                    existNewPolicies.stream()
                        .anyMatch(
                            existNewPolicy ->
                                existNewPolicy.getName().equals(policy.getName())
                                    || existNewPolicy.getResources().equals(policy.getResources()));
                if (alreadyExist) {
                  LOG.warn(
                      "The Ranger policy for the metadata object ({}) already exists!",
                      newAuthzMetaObject);
                  return;
                }
                // Persist the renamed/re-pointed policy back to Ranger.
                rangerClient.updatePolicy(policy.getId(), policy);
              } catch (RangerServiceException e) {
                LOG.error("Failed to rename the policy {}!", policy);
                throw new RuntimeException(e);
              }
            });
  }
/**
* If remove the SCHEMA, need to remove these the relevant policies, `{schema}`, `{schema}.*`,
* `{schema}.*.*` <br>
* If remove the TABLE, need to remove these the relevant policies, `{schema}.*`, `{schema}.*.*`
* <br>
* If remove the COLUMN, Only need to remove `{schema}.*.*` <br>
*/
@Override
protected void removeMetadataObject(AuthorizationMetadataObject authzMetadataObject) {
AuthorizationMetadataObject.Type type = authzMetadataObject.type();
if (type.equals(SCHEMA)) {
doRemoveSchemaMetadataObject(authzMetadataObject);
} else if (type.equals(TABLE)) {
doRemoveTableMetadataObject(authzMetadataObject);
} else if (type.equals(COLUMN)) {
removePolicyByMetadataObject(authzMetadataObject);
} else {
throw new IllegalArgumentException(
"Unsupported metadata object type: " + authzMetadataObject.type());
}
}
  /**
   * Remove the SCHEMA, Need to remove these the relevant policies, `{schema}`, `{schema}.*`,
   * `{schema}.*.*` permissions.
   *
   * <p>Special case: a schema named `*` means the whole metalake/catalog is being dropped, so
   * every Gravitino-managed policy item in the Ranger service is stripped instead.
   */
  private void doRemoveSchemaMetadataObject(AuthorizationMetadataObject authzMetadataObject) {
    Preconditions.checkArgument(
        authzMetadataObject.type() == SCHEMA, "The metadata object type must be a schema");
    Preconditions.checkArgument(
        authzMetadataObject.names().size() == 1, "The metadata object's name size must be 1");
    if (RangerHelper.RESOURCE_ALL.equals(authzMetadataObject.name())) {
      // Delete metalake or catalog policies in this Ranger service
      try {
        List<RangerPolicy> policies = rangerClient.getPoliciesInService(rangerServiceName);
        // Only strip Gravitino-managed policy items; leave user-defined policies untouched.
        policies.stream()
            .filter(RangerHelper::hasGravitinoManagedPolicyItem)
            .forEach(rangerHelper::removeAllGravitinoManagedPolicyItem);
      } catch (RangerServiceException e) {
        throw new RuntimeException(e);
      }
    } else {
      // Remove each hierarchy level in turn: `{schema}`, `{schema}.*`, `{schema}.*.*`.
      List<Pair<AuthorizationMetadataObject.Type, List<String>>> loop =
          ImmutableList.of(
              Pair.of(
                  RangerHadoopSQLMetadataObject.Type.SCHEMA,
                  ImmutableList.of(authzMetadataObject.name())),
              /** SCHEMA permission */
              Pair.of(
                  RangerHadoopSQLMetadataObject.Type.TABLE,
                  ImmutableList.of(authzMetadataObject.name(), RangerHelper.RESOURCE_ALL)),
              /** TABLE permission */
              Pair.of(
                  RangerHadoopSQLMetadataObject.Type.COLUMN,
                  ImmutableList.of(
                      authzMetadataObject.name(),
                      RangerHelper.RESOURCE_ALL,
                      RangerHelper.RESOURCE_ALL))
              /** COLUMN permission */
              );
      for (int index = 0; index < loop.size(); index++) {
        AuthorizationMetadataObject authzMetadataObject1 =
            new RangerHadoopSQLMetadataObject(
                AuthorizationMetadataObject.getParentFullName(loop.get(index).getValue()),
                AuthorizationMetadataObject.getLastName(loop.get(index).getValue()),
                loop.get(index).getKey());
        removePolicyByMetadataObject(authzMetadataObject1);
      }
    }
  }
/**
* Remove the TABLE, Need to remove these the relevant policies, `*.{table}`, `*.{table}.{column}`
* permissions.
*/
private void doRemoveTableMetadataObject(AuthorizationMetadataObject authzMetadataObject) {
List<Pair<AuthorizationMetadataObject.Type, List<String>>> loop =
ImmutableList.of(
Pair.of(RangerHadoopSQLMetadataObject.Type.TABLE, authzMetadataObject.names()),
/** TABLE permission */
Pair.of(
RangerHadoopSQLMetadataObject.Type.COLUMN,
Stream.concat(
authzMetadataObject.names().stream(), Stream.of(RangerHelper.RESOURCE_ALL))
.collect(Collectors.toList()))
/** COLUMN permission */
);
for (int index = 0; index < loop.size(); index++) {
AuthorizationMetadataObject authzMetadataObject1 =
new RangerHadoopSQLMetadataObject(
AuthorizationMetadataObject.getParentFullName(loop.get(index).getValue()),
AuthorizationMetadataObject.getLastName(loop.get(index).getValue()),
loop.get(index).getKey());
removePolicyByMetadataObject(authzMetadataObject1);
}
}
  /**
   * Ranger privileges granted to an object's owner: the single ALL privilege, which subsumes
   * every other Hadoop SQL privilege.
   */
  @Override
  public Set<AuthorizationPrivilege> ownerMappingRule() {
    return ImmutableSet.of(RangerHadoopSQLPrivilege.ALL);
  }
  /**
   * Ranger policy resource keys for this service, ordered by hierarchy depth:
   * database (index 0), table (index 1), column (index 2). Positions here must line up with the
   * name components of a {@code RangerHadoopSQLMetadataObject}.
   */
  @Override
  public List<String> policyResourceDefinesRule() {
    return ImmutableList.of(
        PolicyResource.DATABASE.getName(),
        PolicyResource.TABLE.getName(),
        PolicyResource.COLUMN.getName());
  }
@Override
protected RangerPolicy createPolicyAddResources(AuthorizationMetadataObject metadataObject) {
RangerPolicy policy = new RangerPolicy();
policy.setService(rangerServiceName);
policy.setName(metadataObject.fullName());
List<String> nsMetadataObject = metadataObject.names();
for (int i = 0; i < nsMetadataObject.size(); i++) {
RangerPolicy.RangerPolicyResource policyResource =
new RangerPolicy.RangerPolicyResource(nsMetadataObject.get(i));
policy.getResources().put(policyResourceDefinesRule().get(i), policyResource);
}
return policy;
}
public AuthorizationSecurableObject generateAuthorizationSecurableObject(
List<String> names,
AuthorizationMetadataObject.Type type,
Set<AuthorizationPrivilege> privileges) {
RangerHadoopSQLMetadataObject object =
new RangerHadoopSQLMetadataObject(
AuthorizationMetadataObject.getParentFullName(names),
AuthorizationMetadataObject.getLastName(names),
type);
return generateAuthorizationSecurableObject(object, privileges);
}
  /**
   * Wraps a validated Ranger metadata object and a privilege set into a securable object.
   * Validation happens before construction so invalid names fail fast.
   */
  @Override
  public AuthorizationSecurableObject generateAuthorizationSecurableObject(
      AuthorizationMetadataObject object, Set<AuthorizationPrivilege> privileges) {
    object.validateAuthorizationMetadataObject();
    return new RangerHadoopSQLSecurableObject(
        object.parent(), object.name(), object.type(), privileges);
  }
  /**
   * Gravitino privilege names this plugin can translate. Mirrors the keys of
   * {@link #privilegesMappingRule()}.
   */
  @Override
  public Set<Privilege.Name> allowPrivilegesRule() {
    return ImmutableSet.of(
        Privilege.Name.CREATE_CATALOG,
        Privilege.Name.USE_CATALOG,
        Privilege.Name.CREATE_SCHEMA,
        Privilege.Name.USE_SCHEMA,
        Privilege.Name.CREATE_TABLE,
        Privilege.Name.MODIFY_TABLE,
        Privilege.Name.SELECT_TABLE);
  }
  /**
   * Allow Gravitino MetadataObject type defines rule.
   *
   * <p>Only SQL-shaped objects (metalake/catalog/schema/table/column) can be secured by this
   * plugin; filesets, topics, etc. are rejected upstream.
   *
   * @return To allow Gravitino MetadataObject type defines rule.
   */
  @Override
  public Set<MetadataObject.Type> allowMetadataObjectTypesRule() {
    return ImmutableSet.of(
        MetadataObject.Type.METALAKE,
        MetadataObject.Type.CATALOG,
        MetadataObject.Type.SCHEMA,
        MetadataObject.Type.TABLE,
        MetadataObject.Type.COLUMN);
  }
  /**
   * Translate the Gravitino securable object to the Ranger owner securable object.
   *
   * <p>Owner privileges cascade downward: owning a catalog/metalake yields `*`, `*.*` and
   * `*.*.*`; owning a schema yields `{schema}`, `{schema}.*` and `{schema}.*.*`; owning a table
   * yields `{schema}.{table}` and `{schema}.{table}.*`. Every securable object carries the
   * owner mapping rule (ALL).
   */
  @Override
  public List<AuthorizationSecurableObject> translateOwner(MetadataObject gravitinoMetadataObject) {
    List<AuthorizationSecurableObject> rangerSecurableObjects = new ArrayList<>();
    switch (gravitinoMetadataObject.type()) {
      case METALAKE:
      case CATALOG:
        // Add `*` for the SCHEMA permission
        rangerSecurableObjects.add(
            generateAuthorizationSecurableObject(
                ImmutableList.of(RangerHelper.RESOURCE_ALL),
                RangerHadoopSQLMetadataObject.Type.SCHEMA,
                ownerMappingRule()));
        // Add `*.*` for the TABLE permission
        rangerSecurableObjects.add(
            generateAuthorizationSecurableObject(
                ImmutableList.of(RangerHelper.RESOURCE_ALL, RangerHelper.RESOURCE_ALL),
                RangerHadoopSQLMetadataObject.Type.TABLE,
                ownerMappingRule()));
        // Add `*.*.*` for the COLUMN permission
        rangerSecurableObjects.add(
            generateAuthorizationSecurableObject(
                ImmutableList.of(
                    RangerHelper.RESOURCE_ALL,
                    RangerHelper.RESOURCE_ALL,
                    RangerHelper.RESOURCE_ALL),
                RangerHadoopSQLMetadataObject.Type.COLUMN,
                ownerMappingRule()));
        break;
      case SCHEMA:
        // Add `{schema}` for the SCHEMA permission
        rangerSecurableObjects.add(
            generateAuthorizationSecurableObject(
                ImmutableList.of(gravitinoMetadataObject.name() /*Schema name*/),
                RangerHadoopSQLMetadataObject.Type.SCHEMA,
                ownerMappingRule()));
        // Add `{schema}.*` for the TABLE permission
        rangerSecurableObjects.add(
            generateAuthorizationSecurableObject(
                ImmutableList.of(
                    gravitinoMetadataObject.name() /*Schema name*/, RangerHelper.RESOURCE_ALL),
                RangerHadoopSQLMetadataObject.Type.TABLE,
                ownerMappingRule()));
        // Add `{schema}.*.*` for the COLUMN permission
        rangerSecurableObjects.add(
            generateAuthorizationSecurableObject(
                ImmutableList.of(
                    gravitinoMetadataObject.name() /*Schema name*/,
                    RangerHelper.RESOURCE_ALL,
                    RangerHelper.RESOURCE_ALL),
                RangerHadoopSQLMetadataObject.Type.COLUMN,
                ownerMappingRule()));
        break;
      case TABLE:
        // A table may translate to several Ranger objects (e.g. per catalog mapping);
        // grant table- and column-level ownership for each.
        translateMetadataObject(gravitinoMetadataObject).stream()
            .forEach(
                rangerMetadataObject -> {
                  // Add `{schema}.{table}` for the TABLE permission
                  rangerSecurableObjects.add(
                      generateAuthorizationSecurableObject(
                          rangerMetadataObject.names(),
                          RangerHadoopSQLMetadataObject.Type.TABLE,
                          ownerMappingRule()));
                  // Add `{schema}.{table}.*` for the COLUMN permission
                  rangerSecurableObjects.add(
                      generateAuthorizationSecurableObject(
                          Stream.concat(
                                  rangerMetadataObject.names().stream(),
                                  Stream.of(RangerHelper.RESOURCE_ALL))
                              .collect(Collectors.toList()),
                          RangerHadoopSQLMetadataObject.Type.COLUMN,
                          ownerMappingRule()));
                });
        break;
      default:
        throw new AuthorizationPluginException(
            ErrorMessages.OWNER_PRIVILEGE_NOT_SUPPORTED, gravitinoMetadataObject.type());
    }
    return rangerSecurableObjects;
  }
  /**
   * Translate the Gravitino securable object to the Ranger securable object.
   *
   * <p>For every non-null privilege on {@code securableObject} that has an entry in
   * {@code privilegesMappingRule()}, this builds the corresponding set of Ranger Hive privileges
   * (preserving the privilege condition) and emits one or more Ranger securable objects scoped to
   * the appropriate Hadoop SQL resource (SCHEMA, TABLE, COLUMN). Privileges without a mapping rule
   * are silently skipped.
   *
   * @param securableObject the Gravitino securable object whose privileges are translated
   * @return the Ranger securable objects carrying the translated privileges
   * @throws AuthorizationPluginException when a supported privilege is applied to an unsupported
   *     securable-object type (e.g. {@code USE_CATALOG} on a TABLE)
   */
  @Override
  public List<AuthorizationSecurableObject> translatePrivilege(SecurableObject securableObject) {
    List<AuthorizationSecurableObject> rangerSecurableObjects = new ArrayList<>();
    securableObject.privileges().stream()
        .filter(Objects::nonNull)
        .forEach(
            gravitinoPrivilege -> {
              Set<AuthorizationPrivilege> rangerPrivileges = new HashSet<>();
              // Ignore unsupported privileges
              if (!privilegesMappingRule().containsKey(gravitinoPrivilege.name())) {
                return;
              }
              // Expand the Gravitino privilege into its Ranger Hive equivalents, carrying the
              // ALLOW/DENY condition along.
              privilegesMappingRule().get(gravitinoPrivilege.name()).stream()
                  .forEach(
                      rangerPrivilege ->
                          rangerPrivileges.add(
                              new RangerPrivileges.RangerHivePrivilegeImpl(
                                  rangerPrivilege, gravitinoPrivilege.condition())));
              switch (gravitinoPrivilege.name()) {
                case CREATE_CATALOG:
                  // Ignore the Gravitino privilege `CREATE_CATALOG` in the
                  // RangerAuthorizationHivePlugin
                  break;
                case USE_CATALOG:
                  switch (securableObject.type()) {
                    case METALAKE:
                    case CATALOG:
                      // Add Ranger privilege(`SELECT`) to SCHEMA(`*`)
                      rangerSecurableObjects.add(
                          generateAuthorizationSecurableObject(
                              ImmutableList.of(RangerHelper.RESOURCE_ALL),
                              RangerHadoopSQLMetadataObject.Type.SCHEMA,
                              rangerPrivileges));
                      break;
                    default:
                      throw new AuthorizationPluginException(
                          ErrorMessages.PRIVILEGE_NOT_SUPPORTED,
                          gravitinoPrivilege.name(),
                          securableObject.type());
                  }
                  break;
                case CREATE_SCHEMA:
                  switch (securableObject.type()) {
                    case METALAKE:
                    case CATALOG:
                      // Add Ranger privilege(`CREATE`) to SCHEMA(`*`)
                      rangerSecurableObjects.add(
                          generateAuthorizationSecurableObject(
                              ImmutableList.of(RangerHelper.RESOURCE_ALL),
                              RangerHadoopSQLMetadataObject.Type.SCHEMA,
                              rangerPrivileges));
                      break;
                    default:
                      throw new AuthorizationPluginException(
                          ErrorMessages.PRIVILEGE_NOT_SUPPORTED,
                          gravitinoPrivilege.name(),
                          securableObject.type());
                  }
                  break;
                case USE_SCHEMA:
                  switch (securableObject.type()) {
                    case METALAKE:
                    case CATALOG:
                      // Add Ranger privilege(`SELECT`) to SCHEMA(`*`)
                      rangerSecurableObjects.add(
                          generateAuthorizationSecurableObject(
                              ImmutableList.of(RangerHelper.RESOURCE_ALL),
                              RangerHadoopSQLMetadataObject.Type.SCHEMA,
                              rangerPrivileges));
                      break;
                    case SCHEMA:
                      // Add Ranger privilege(`SELECT`) to SCHEMA(`{schema}`)
                      rangerSecurableObjects.add(
                          generateAuthorizationSecurableObject(
                              ImmutableList.of(securableObject.name() /*Schema name*/),
                              RangerHadoopSQLMetadataObject.Type.SCHEMA,
                              rangerPrivileges));
                      break;
                    default:
                      throw new AuthorizationPluginException(
                          ErrorMessages.PRIVILEGE_NOT_SUPPORTED,
                          gravitinoPrivilege.name(),
                          securableObject.type());
                  }
                  break;
                case CREATE_TABLE:
                case MODIFY_TABLE:
                case SELECT_TABLE:
                  // Table-level privileges are granted on both the TABLE resource and its COLUMN
                  // resource so that column-level Ranger checks are also satisfied.
                  switch (securableObject.type()) {
                    case METALAKE:
                    case CATALOG:
                      // Add `*.*` for the TABLE permission
                      rangerSecurableObjects.add(
                          generateAuthorizationSecurableObject(
                              ImmutableList.of(
                                  RangerHelper.RESOURCE_ALL, RangerHelper.RESOURCE_ALL),
                              RangerHadoopSQLMetadataObject.Type.TABLE,
                              rangerPrivileges));
                      // Add `*.*.*` for the COLUMN permission
                      rangerSecurableObjects.add(
                          generateAuthorizationSecurableObject(
                              ImmutableList.of(
                                  RangerHelper.RESOURCE_ALL,
                                  RangerHelper.RESOURCE_ALL,
                                  RangerHelper.RESOURCE_ALL),
                              RangerHadoopSQLMetadataObject.Type.COLUMN,
                              rangerPrivileges));
                      break;
                    case SCHEMA:
                      // Add `{schema}.*` for the TABLE permission
                      rangerSecurableObjects.add(
                          generateAuthorizationSecurableObject(
                              ImmutableList.of(
                                  securableObject.name() /*Schema name*/,
                                  RangerHelper.RESOURCE_ALL),
                              RangerHadoopSQLMetadataObject.Type.TABLE,
                              rangerPrivileges));
                      // Add `{schema}.*.*` for the COLUMN permission
                      rangerSecurableObjects.add(
                          generateAuthorizationSecurableObject(
                              ImmutableList.of(
                                  securableObject.name() /*Schema name*/,
                                  RangerHelper.RESOURCE_ALL,
                                  RangerHelper.RESOURCE_ALL),
                              RangerHadoopSQLMetadataObject.Type.COLUMN,
                              rangerPrivileges));
                      break;
                    case TABLE:
                      // CREATE_TABLE cannot target an existing table; the other two map onto the
                      // concrete `{schema}.{table}` (TABLE) and `{schema}.{table}.*` (COLUMN).
                      if (gravitinoPrivilege.name() == Privilege.Name.CREATE_TABLE) {
                        throw new AuthorizationPluginException(
                            ErrorMessages.PRIVILEGE_NOT_SUPPORTED,
                            gravitinoPrivilege.name(),
                            securableObject.type());
                      } else {
                        translateMetadataObject(securableObject).stream()
                            .forEach(
                                rangerMetadataObject -> {
                                  // Add `{schema}.{table}` for the TABLE permission
                                  rangerSecurableObjects.add(
                                      generateAuthorizationSecurableObject(
                                          rangerMetadataObject.names(),
                                          RangerHadoopSQLMetadataObject.Type.TABLE,
                                          rangerPrivileges));
                                  // Add `{schema}.{table}.*` for the COLUMN permission
                                  rangerSecurableObjects.add(
                                      generateAuthorizationSecurableObject(
                                          Stream.concat(
                                                  rangerMetadataObject.names().stream(),
                                                  Stream.of(RangerHelper.RESOURCE_ALL))
                                              .collect(Collectors.toList()),
                                          RangerHadoopSQLMetadataObject.Type.COLUMN,
                                          rangerPrivileges));
                                });
                      }
                      break;
                    default:
                      // NOTE(review): this default only warns while the USE_CATALOG/CREATE_SCHEMA/
                      // USE_SCHEMA defaults throw — confirm the asymmetry is intentional. Also,
                      // ErrorMessages.PRIVILEGE_NOT_SUPPORTED presumably uses SLF4J `{}`
                      // placeholders for this warn call to format correctly — TODO confirm.
                      LOG.warn(
                          ErrorMessages.PRIVILEGE_NOT_SUPPORTED,
                          gravitinoPrivilege.name(),
                          securableObject.type());
                  }
                  break;
                default:
                  // Mapped privilege with no translation branch: log and skip rather than fail.
                  LOG.warn(
                      ErrorMessages.PRIVILEGE_NOT_SUPPORTED,
                      gravitinoPrivilege.name(),
                      securableObject.type());
              }
            });
    return rangerSecurableObjects;
  }
/**
* Because the Ranger metadata object is different from the Gravitino metadata object, we need to
* convert the Gravitino metadata object to the Ranger metadata object.
*/
@Override
public List<AuthorizationMetadataObject> translateMetadataObject(MetadataObject metadataObject) {
Preconditions.checkArgument(
!(metadataObject instanceof RangerPrivileges),
"The metadata object must not be a RangerPrivileges object.");
List<String> nsMetadataObject =
Lists.newArrayList(SecurableObjects.DOT_SPLITTER.splitToList(metadataObject.fullName()));
Preconditions.checkArgument(
nsMetadataObject.size() > 0, "The metadata object must have at least one name.");
AuthorizationMetadataObject.Type type;
if (metadataObject.type() == MetadataObject.Type.METALAKE
|| metadataObject.type() == MetadataObject.Type.CATALOG) {
nsMetadataObject.clear();
nsMetadataObject.add(RangerHelper.RESOURCE_ALL);
type = RangerHadoopSQLMetadataObject.Type.SCHEMA;
} else {
nsMetadataObject.remove(0); // Remove the catalog name
type = RangerHadoopSQLMetadataObject.Type.fromMetadataType(metadataObject.type());
}
RangerHadoopSQLMetadataObject rangerHadoopSQLMetadataObject =
new RangerHadoopSQLMetadataObject(
AuthorizationMetadataObject.getParentFullName(nsMetadataObject),
AuthorizationMetadataObject.getLastName(nsMetadataObject),
type);
rangerHadoopSQLMetadataObject.validateAuthorizationMetadataObject();
return ImmutableList.of(rangerHadoopSQLMetadataObject);
}
@Override
public Boolean onMetadataUpdated(MetadataObjectChange... changes) throws RuntimeException {
for (MetadataObjectChange change : changes) {
if (change instanceof MetadataObjectChange.RenameMetadataObject) {
MetadataObject metadataObject =
((MetadataObjectChange.RenameMetadataObject) change).metadataObject();
MetadataObject newMetadataObject =
((MetadataObjectChange.RenameMetadataObject) change).newMetadataObject();
Preconditions.checkArgument(
metadataObject.type() == newMetadataObject.type(),
"The old and new metadata object types must be equal!");
if (metadataObject.type() == MetadataObject.Type.METALAKE) {
// Rename the metalake name
this.metalake = newMetadataObject.name();
// Did not need to update the Ranger policy
continue;
} else if (metadataObject.type() == MetadataObject.Type.CATALOG) {
// Did not need to update the Ranger policy
continue;
}
List<AuthorizationMetadataObject> oldAuthzMetadataObjects =
translateMetadataObject(metadataObject);
List<AuthorizationMetadataObject> newAuthzMetadataObjects =
translateMetadataObject(newMetadataObject);
Preconditions.checkArgument(
oldAuthzMetadataObjects.size() == newAuthzMetadataObjects.size(),
"The old and new metadata objects sizes must be equal!");
for (int i = 0; i < oldAuthzMetadataObjects.size(); i++) {
AuthorizationMetadataObject oldAuthMetadataObject = oldAuthzMetadataObjects.get(i);
AuthorizationMetadataObject newAuthzMetadataObject = newAuthzMetadataObjects.get(i);
if (oldAuthMetadataObject.equals(newAuthzMetadataObject)) {
LOG.info(
"The metadata object({}) and new metadata object({}) are equal, so ignoring rename!",
oldAuthMetadataObject.fullName(),
newAuthzMetadataObject.fullName());
continue;
}
renameMetadataObject(oldAuthMetadataObject, newAuthzMetadataObject);
}
} else if (change instanceof MetadataObjectChange.RemoveMetadataObject) {
MetadataObject metadataObject =
((MetadataObjectChange.RemoveMetadataObject) change).metadataObject();
List<AuthorizationMetadataObject> authzMetadataObjects =
translateMetadataObject(metadataObject);
authzMetadataObjects.stream().forEach(this::removeMetadataObject);
} else {
throw new IllegalArgumentException(
"Unsupported metadata object change type: "
+ (change == null ? "null" : change.getClass().getSimpleName()));
}
}
return Boolean.TRUE;
}
  /** Returns the Ranger service type this plugin manages (the Hadoop SQL service). */
  @Override
  protected String getServiceType() {
    return HADOOP_SQL_SERVICE_TYPE;
  }
@Override
protected Map<String, String> getServiceConfigs(Map<String, String> config) {
return ImmutableMap.<String, String>builder()
.put(
RangerAuthorizationProperties.RANGER_USERNAME.substring(getPrefixLength()),
config.get(RangerAuthorizationProperties.RANGER_USERNAME))
.put(
RangerAuthorizationProperties.RANGER_PASSWORD.substring(getPrefixLength()),
config.get(RangerAuthorizationProperties.RANGER_PASSWORD))
.put(
RangerAuthorizationProperties.JDBC_DRIVER_CLASS_NAME.substring(getPrefixLength()),
getConfValue(
config,
RangerAuthorizationProperties.JDBC_DRIVER_CLASS_NAME,
RangerAuthorizationProperties.DEFAULT_JDBC_DRIVER_CLASS_NAME))
.put(
RangerAuthorizationProperties.JDBC_URL.substring(getPrefixLength()),
getConfValue(
config,
RangerAuthorizationProperties.JDBC_URL,
RangerAuthorizationProperties.DEFAULT_JDBC_URL))
.build();
}
}
|
googleapis/google-cloud-java | 37,773 | java-aiplatform/proto-google-cloud-aiplatform-v1/src/main/java/com/google/cloud/aiplatform/v1/ListPipelineJobsResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/aiplatform/v1/pipeline_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.aiplatform.v1;
/**
*
*
* <pre>
* Response message for
* [PipelineService.ListPipelineJobs][google.cloud.aiplatform.v1.PipelineService.ListPipelineJobs]
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1.ListPipelineJobsResponse}
*/
public final class ListPipelineJobsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1.ListPipelineJobsResponse)
ListPipelineJobsResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListPipelineJobsResponse.newBuilder() to construct.
private ListPipelineJobsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListPipelineJobsResponse() {
pipelineJobs_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListPipelineJobsResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.aiplatform.v1.PipelineServiceProto
.internal_static_google_cloud_aiplatform_v1_ListPipelineJobsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.aiplatform.v1.PipelineServiceProto
.internal_static_google_cloud_aiplatform_v1_ListPipelineJobsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.aiplatform.v1.ListPipelineJobsResponse.class,
com.google.cloud.aiplatform.v1.ListPipelineJobsResponse.Builder.class);
}
public static final int PIPELINE_JOBS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.aiplatform.v1.PipelineJob> pipelineJobs_;
/**
*
*
* <pre>
* List of PipelineJobs in the requested page.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.PipelineJob pipeline_jobs = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.aiplatform.v1.PipelineJob> getPipelineJobsList() {
return pipelineJobs_;
}
/**
*
*
* <pre>
* List of PipelineJobs in the requested page.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.PipelineJob pipeline_jobs = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.aiplatform.v1.PipelineJobOrBuilder>
getPipelineJobsOrBuilderList() {
return pipelineJobs_;
}
/**
*
*
* <pre>
* List of PipelineJobs in the requested page.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.PipelineJob pipeline_jobs = 1;</code>
*/
@java.lang.Override
public int getPipelineJobsCount() {
return pipelineJobs_.size();
}
/**
*
*
* <pre>
* List of PipelineJobs in the requested page.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.PipelineJob pipeline_jobs = 1;</code>
*/
@java.lang.Override
public com.google.cloud.aiplatform.v1.PipelineJob getPipelineJobs(int index) {
return pipelineJobs_.get(index);
}
/**
*
*
* <pre>
* List of PipelineJobs in the requested page.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.PipelineJob pipeline_jobs = 1;</code>
*/
@java.lang.Override
public com.google.cloud.aiplatform.v1.PipelineJobOrBuilder getPipelineJobsOrBuilder(int index) {
return pipelineJobs_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token to retrieve the next page of results.
* Pass to
* [ListPipelineJobsRequest.page_token][google.cloud.aiplatform.v1.ListPipelineJobsRequest.page_token]
* to obtain that page.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
  @java.lang.Override
  public java.lang.String getNextPageToken() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      // Already decoded (or set as a String) — return it directly.
      return (java.lang.String) ref;
    } else {
      // After parsing from the wire the field holds a ByteString; decode it as UTF-8 once
      // and cache the String back into the field so later calls skip the conversion.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      nextPageToken_ = s;
      return s;
    }
  }
/**
*
*
* <pre>
* A token to retrieve the next page of results.
* Pass to
* [ListPipelineJobsRequest.page_token][google.cloud.aiplatform.v1.ListPipelineJobsRequest.page_token]
* to obtain that page.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    // memoizedIsInitialized: -1 = not yet computed, 0 = false, 1 = true.
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // Proto3 message with no required fields: always initialized; cache the result.
    memoizedIsInitialized = 1;
    return true;
  }
  /** Serializes this message to {@code output}: field 1 (pipeline_jobs), field 2 (next_page_token), then unknown fields. */
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    for (int i = 0; i < pipelineJobs_.size(); i++) {
      output.writeMessage(1, pipelineJobs_.get(i));
    }
    // Proto3 semantics: an empty string is the default and is not written.
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
    }
    getUnknownFields().writeTo(output);
  }
  /** Computes (and memoizes in {@code memoizedSize}) the serialized byte size, mirroring {@code writeTo}. */
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    // -1 means the size has not been computed yet.
    if (size != -1) return size;
    size = 0;
    for (int i = 0; i < pipelineJobs_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, pipelineJobs_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  /** Value equality over pipeline_jobs, next_page_token, and the unknown-field set. */
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.aiplatform.v1.ListPipelineJobsResponse)) {
      return super.equals(obj);
    }
    com.google.cloud.aiplatform.v1.ListPipelineJobsResponse other =
        (com.google.cloud.aiplatform.v1.ListPipelineJobsResponse) obj;
    if (!getPipelineJobsList().equals(other.getPipelineJobsList())) return false;
    if (!getNextPageToken().equals(other.getNextPageToken())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  /** Hash consistent with {@link #equals}: folds in each set field's number and value; memoized. */
  @java.lang.Override
  public int hashCode() {
    // 0 doubles as the "not yet computed" sentinel.
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    // The repeated field only contributes when non-empty, matching serialized form.
    if (getPipelineJobsCount() > 0) {
      hash = (37 * hash) + PIPELINE_JOBS_FIELD_NUMBER;
      hash = (53 * hash) + getPipelineJobsList().hashCode();
    }
    hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getNextPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
public static com.google.cloud.aiplatform.v1.ListPipelineJobsResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1.ListPipelineJobsResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.ListPipelineJobsResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1.ListPipelineJobsResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.ListPipelineJobsResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1.ListPipelineJobsResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.ListPipelineJobsResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1.ListPipelineJobsResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.ListPipelineJobsResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1.ListPipelineJobsResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.ListPipelineJobsResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1.ListPipelineJobsResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.aiplatform.v1.ListPipelineJobsResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Response message for
* [PipelineService.ListPipelineJobs][google.cloud.aiplatform.v1.PipelineService.ListPipelineJobs]
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1.ListPipelineJobsResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1.ListPipelineJobsResponse)
com.google.cloud.aiplatform.v1.ListPipelineJobsResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.aiplatform.v1.PipelineServiceProto
.internal_static_google_cloud_aiplatform_v1_ListPipelineJobsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.aiplatform.v1.PipelineServiceProto
.internal_static_google_cloud_aiplatform_v1_ListPipelineJobsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.aiplatform.v1.ListPipelineJobsResponse.class,
com.google.cloud.aiplatform.v1.ListPipelineJobsResponse.Builder.class);
}
// Construct using com.google.cloud.aiplatform.v1.ListPipelineJobsResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (pipelineJobsBuilder_ == null) {
pipelineJobs_ = java.util.Collections.emptyList();
} else {
pipelineJobs_ = null;
pipelineJobsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.aiplatform.v1.PipelineServiceProto
.internal_static_google_cloud_aiplatform_v1_ListPipelineJobsResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1.ListPipelineJobsResponse getDefaultInstanceForType() {
return com.google.cloud.aiplatform.v1.ListPipelineJobsResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.aiplatform.v1.ListPipelineJobsResponse build() {
com.google.cloud.aiplatform.v1.ListPipelineJobsResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1.ListPipelineJobsResponse buildPartial() {
com.google.cloud.aiplatform.v1.ListPipelineJobsResponse result =
new com.google.cloud.aiplatform.v1.ListPipelineJobsResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.cloud.aiplatform.v1.ListPipelineJobsResponse result) {
if (pipelineJobsBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
pipelineJobs_ = java.util.Collections.unmodifiableList(pipelineJobs_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.pipelineJobs_ = pipelineJobs_;
} else {
result.pipelineJobs_ = pipelineJobsBuilder_.build();
}
}
private void buildPartial0(com.google.cloud.aiplatform.v1.ListPipelineJobsResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.aiplatform.v1.ListPipelineJobsResponse) {
return mergeFrom((com.google.cloud.aiplatform.v1.ListPipelineJobsResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.aiplatform.v1.ListPipelineJobsResponse other) {
if (other == com.google.cloud.aiplatform.v1.ListPipelineJobsResponse.getDefaultInstance())
return this;
if (pipelineJobsBuilder_ == null) {
if (!other.pipelineJobs_.isEmpty()) {
if (pipelineJobs_.isEmpty()) {
pipelineJobs_ = other.pipelineJobs_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensurePipelineJobsIsMutable();
pipelineJobs_.addAll(other.pipelineJobs_);
}
onChanged();
}
} else {
if (!other.pipelineJobs_.isEmpty()) {
if (pipelineJobsBuilder_.isEmpty()) {
pipelineJobsBuilder_.dispose();
pipelineJobsBuilder_ = null;
pipelineJobs_ = other.pipelineJobs_;
bitField0_ = (bitField0_ & ~0x00000001);
pipelineJobsBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getPipelineJobsFieldBuilder()
: null;
} else {
pipelineJobsBuilder_.addAllMessages(other.pipelineJobs_);
}
}
}
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.cloud.aiplatform.v1.PipelineJob m =
input.readMessage(
com.google.cloud.aiplatform.v1.PipelineJob.parser(), extensionRegistry);
if (pipelineJobsBuilder_ == null) {
ensurePipelineJobsIsMutable();
pipelineJobs_.add(m);
} else {
pipelineJobsBuilder_.addMessage(m);
}
break;
} // case 10
case 18:
{
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.cloud.aiplatform.v1.PipelineJob> pipelineJobs_ =
java.util.Collections.emptyList();
private void ensurePipelineJobsIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
pipelineJobs_ =
new java.util.ArrayList<com.google.cloud.aiplatform.v1.PipelineJob>(pipelineJobs_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.aiplatform.v1.PipelineJob,
com.google.cloud.aiplatform.v1.PipelineJob.Builder,
com.google.cloud.aiplatform.v1.PipelineJobOrBuilder>
pipelineJobsBuilder_;
/**
*
*
* <pre>
* List of PipelineJobs in the requested page.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.PipelineJob pipeline_jobs = 1;</code>
*/
public java.util.List<com.google.cloud.aiplatform.v1.PipelineJob> getPipelineJobsList() {
if (pipelineJobsBuilder_ == null) {
return java.util.Collections.unmodifiableList(pipelineJobs_);
} else {
return pipelineJobsBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* List of PipelineJobs in the requested page.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.PipelineJob pipeline_jobs = 1;</code>
*/
public int getPipelineJobsCount() {
if (pipelineJobsBuilder_ == null) {
return pipelineJobs_.size();
} else {
return pipelineJobsBuilder_.getCount();
}
}
/**
*
*
* <pre>
* List of PipelineJobs in the requested page.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.PipelineJob pipeline_jobs = 1;</code>
*/
public com.google.cloud.aiplatform.v1.PipelineJob getPipelineJobs(int index) {
if (pipelineJobsBuilder_ == null) {
return pipelineJobs_.get(index);
} else {
return pipelineJobsBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* List of PipelineJobs in the requested page.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.PipelineJob pipeline_jobs = 1;</code>
*/
public Builder setPipelineJobs(int index, com.google.cloud.aiplatform.v1.PipelineJob value) {
if (pipelineJobsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensurePipelineJobsIsMutable();
pipelineJobs_.set(index, value);
onChanged();
} else {
pipelineJobsBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* List of PipelineJobs in the requested page.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.PipelineJob pipeline_jobs = 1;</code>
*/
public Builder setPipelineJobs(
int index, com.google.cloud.aiplatform.v1.PipelineJob.Builder builderForValue) {
if (pipelineJobsBuilder_ == null) {
ensurePipelineJobsIsMutable();
pipelineJobs_.set(index, builderForValue.build());
onChanged();
} else {
pipelineJobsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* List of PipelineJobs in the requested page.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.PipelineJob pipeline_jobs = 1;</code>
*/
public Builder addPipelineJobs(com.google.cloud.aiplatform.v1.PipelineJob value) {
if (pipelineJobsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensurePipelineJobsIsMutable();
pipelineJobs_.add(value);
onChanged();
} else {
pipelineJobsBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* List of PipelineJobs in the requested page.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.PipelineJob pipeline_jobs = 1;</code>
*/
public Builder addPipelineJobs(int index, com.google.cloud.aiplatform.v1.PipelineJob value) {
if (pipelineJobsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensurePipelineJobsIsMutable();
pipelineJobs_.add(index, value);
onChanged();
} else {
pipelineJobsBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* List of PipelineJobs in the requested page.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.PipelineJob pipeline_jobs = 1;</code>
*/
public Builder addPipelineJobs(
com.google.cloud.aiplatform.v1.PipelineJob.Builder builderForValue) {
if (pipelineJobsBuilder_ == null) {
ensurePipelineJobsIsMutable();
pipelineJobs_.add(builderForValue.build());
onChanged();
} else {
pipelineJobsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* List of PipelineJobs in the requested page.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.PipelineJob pipeline_jobs = 1;</code>
*/
public Builder addPipelineJobs(
int index, com.google.cloud.aiplatform.v1.PipelineJob.Builder builderForValue) {
if (pipelineJobsBuilder_ == null) {
ensurePipelineJobsIsMutable();
pipelineJobs_.add(index, builderForValue.build());
onChanged();
} else {
pipelineJobsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* List of PipelineJobs in the requested page.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.PipelineJob pipeline_jobs = 1;</code>
*/
public Builder addAllPipelineJobs(
java.lang.Iterable<? extends com.google.cloud.aiplatform.v1.PipelineJob> values) {
if (pipelineJobsBuilder_ == null) {
ensurePipelineJobsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, pipelineJobs_);
onChanged();
} else {
pipelineJobsBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* List of PipelineJobs in the requested page.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.PipelineJob pipeline_jobs = 1;</code>
*/
public Builder clearPipelineJobs() {
if (pipelineJobsBuilder_ == null) {
pipelineJobs_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
pipelineJobsBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* List of PipelineJobs in the requested page.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.PipelineJob pipeline_jobs = 1;</code>
*/
public Builder removePipelineJobs(int index) {
if (pipelineJobsBuilder_ == null) {
ensurePipelineJobsIsMutable();
pipelineJobs_.remove(index);
onChanged();
} else {
pipelineJobsBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* List of PipelineJobs in the requested page.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.PipelineJob pipeline_jobs = 1;</code>
*/
public com.google.cloud.aiplatform.v1.PipelineJob.Builder getPipelineJobsBuilder(int index) {
return getPipelineJobsFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* List of PipelineJobs in the requested page.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.PipelineJob pipeline_jobs = 1;</code>
*/
public com.google.cloud.aiplatform.v1.PipelineJobOrBuilder getPipelineJobsOrBuilder(int index) {
if (pipelineJobsBuilder_ == null) {
return pipelineJobs_.get(index);
} else {
return pipelineJobsBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* List of PipelineJobs in the requested page.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.PipelineJob pipeline_jobs = 1;</code>
*/
public java.util.List<? extends com.google.cloud.aiplatform.v1.PipelineJobOrBuilder>
getPipelineJobsOrBuilderList() {
if (pipelineJobsBuilder_ != null) {
return pipelineJobsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(pipelineJobs_);
}
}
/**
*
*
* <pre>
* List of PipelineJobs in the requested page.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.PipelineJob pipeline_jobs = 1;</code>
*/
public com.google.cloud.aiplatform.v1.PipelineJob.Builder addPipelineJobsBuilder() {
return getPipelineJobsFieldBuilder()
.addBuilder(com.google.cloud.aiplatform.v1.PipelineJob.getDefaultInstance());
}
/**
*
*
* <pre>
* List of PipelineJobs in the requested page.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.PipelineJob pipeline_jobs = 1;</code>
*/
public com.google.cloud.aiplatform.v1.PipelineJob.Builder addPipelineJobsBuilder(int index) {
return getPipelineJobsFieldBuilder()
.addBuilder(index, com.google.cloud.aiplatform.v1.PipelineJob.getDefaultInstance());
}
/**
*
*
* <pre>
* List of PipelineJobs in the requested page.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.PipelineJob pipeline_jobs = 1;</code>
*/
public java.util.List<com.google.cloud.aiplatform.v1.PipelineJob.Builder>
getPipelineJobsBuilderList() {
return getPipelineJobsFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.aiplatform.v1.PipelineJob,
com.google.cloud.aiplatform.v1.PipelineJob.Builder,
com.google.cloud.aiplatform.v1.PipelineJobOrBuilder>
getPipelineJobsFieldBuilder() {
if (pipelineJobsBuilder_ == null) {
pipelineJobsBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.aiplatform.v1.PipelineJob,
com.google.cloud.aiplatform.v1.PipelineJob.Builder,
com.google.cloud.aiplatform.v1.PipelineJobOrBuilder>(
pipelineJobs_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
pipelineJobs_ = null;
}
return pipelineJobsBuilder_;
}
    // Holds either a String or a ByteString; the getters lazily convert and
    // cache the other representation (standard protobuf string-field pattern).
    private java.lang.Object nextPageToken_ = "";
    /**
     *
     *
     * <pre>
     * A token to retrieve the next page of results.
     * Pass to
     * [ListPipelineJobsRequest.page_token][google.cloud.aiplatform.v1.ListPipelineJobsRequest.page_token]
     * to obtain that page.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The nextPageToken.
     */
    public java.lang.String getNextPageToken() {
      java.lang.Object ref = nextPageToken_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        nextPageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * A token to retrieve the next page of results.
     * Pass to
     * [ListPipelineJobsRequest.page_token][google.cloud.aiplatform.v1.ListPipelineJobsRequest.page_token]
     * to obtain that page.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The bytes for nextPageToken.
     */
    public com.google.protobuf.ByteString getNextPageTokenBytes() {
      java.lang.Object ref = nextPageToken_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        nextPageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * A token to retrieve the next page of results.
     * Pass to
     * [ListPipelineJobsRequest.page_token][google.cloud.aiplatform.v1.ListPipelineJobsRequest.page_token]
     * to obtain that page.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      nextPageToken_ = value;
      // Bit 0x00000002 marks next_page_token as explicitly set on this builder.
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * A token to retrieve the next page of results.
     * Pass to
     * [ListPipelineJobsRequest.page_token][google.cloud.aiplatform.v1.ListPipelineJobsRequest.page_token]
     * to obtain that page.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearNextPageToken() {
      nextPageToken_ = getDefaultInstance().getNextPageToken();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * A token to retrieve the next page of results.
     * Pass to
     * [ListPipelineJobsRequest.page_token][google.cloud.aiplatform.v1.ListPipelineJobsRequest.page_token]
     * to obtain that page.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The bytes for nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      // proto3 strings must be valid UTF-8; reject invalid bytes up front.
      checkByteStringIsUtf8(value);
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    // Final pass-throughs so the generated Builder type (not the generic
    // superclass) is returned, preserving fluent chaining.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1.ListPipelineJobsResponse)
}
  // @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1.ListPipelineJobsResponse)
  // Singleton default instance; all unset message fields resolve to it.
  private static final com.google.cloud.aiplatform.v1.ListPipelineJobsResponse DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1.ListPipelineJobsResponse();
  }

  public static com.google.cloud.aiplatform.v1.ListPipelineJobsResponse getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Parser that delegates wire parsing to Builder.mergeFrom, re-attaching the
  // partially built message to any exception so callers can inspect it.
  private static final com.google.protobuf.Parser<ListPipelineJobsResponse> PARSER =
      new com.google.protobuf.AbstractParser<ListPipelineJobsResponse>() {
        @java.lang.Override
        public ListPipelineJobsResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<ListPipelineJobsResponse> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<ListPipelineJobsResponse> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.aiplatform.v1.ListPipelineJobsResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/datastream/v1/datastream_resources.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.datastream.v1;
/**
*
*
* <pre>
* Salesforce object.
* </pre>
*
* Protobuf type {@code google.cloud.datastream.v1.SalesforceObject}
*/
public final class SalesforceObject extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.datastream.v1.SalesforceObject)
SalesforceObjectOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use SalesforceObject.newBuilder() to construct.
  private SalesforceObject(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // Default-instance constructor: initializes fields to proto3 defaults.
  private SalesforceObject() {
    objectName_ = "";
    fields_ = java.util.Collections.emptyList();
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new SalesforceObject();
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.datastream.v1.DatastreamResourcesProto
        .internal_static_google_cloud_datastream_v1_SalesforceObject_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.datastream.v1.DatastreamResourcesProto
        .internal_static_google_cloud_datastream_v1_SalesforceObject_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.datastream.v1.SalesforceObject.class,
            com.google.cloud.datastream.v1.SalesforceObject.Builder.class);
  }
  public static final int OBJECT_NAME_FIELD_NUMBER = 1;

  // Holds either a String or a ByteString; getters lazily convert and cache
  // the other form (standard protobuf string-field pattern).
  @SuppressWarnings("serial")
  private volatile java.lang.Object objectName_ = "";
  /**
   *
   *
   * <pre>
   * Object name.
   * </pre>
   *
   * <code>string object_name = 1;</code>
   *
   * @return The objectName.
   */
  @java.lang.Override
  public java.lang.String getObjectName() {
    java.lang.Object ref = objectName_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      objectName_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Object name.
   * </pre>
   *
   * <code>string object_name = 1;</code>
   *
   * @return The bytes for objectName.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getObjectNameBytes() {
    java.lang.Object ref = objectName_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      objectName_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int FIELDS_FIELD_NUMBER = 2;

  // Immutable snapshot of the repeated field on a built message.
  @SuppressWarnings("serial")
  private java.util.List<com.google.cloud.datastream.v1.SalesforceField> fields_;
  /**
   *
   *
   * <pre>
   * Salesforce fields.
   * When unspecified as part of include objects,
   * includes everything, when unspecified as part of exclude objects,
   * excludes nothing.
   * </pre>
   *
   * <code>repeated .google.cloud.datastream.v1.SalesforceField fields = 2;</code>
   */
  @java.lang.Override
  public java.util.List<com.google.cloud.datastream.v1.SalesforceField> getFieldsList() {
    return fields_;
  }
  /**
   *
   *
   * <pre>
   * Salesforce fields.
   * When unspecified as part of include objects,
   * includes everything, when unspecified as part of exclude objects,
   * excludes nothing.
   * </pre>
   *
   * <code>repeated .google.cloud.datastream.v1.SalesforceField fields = 2;</code>
   */
  @java.lang.Override
  public java.util.List<? extends com.google.cloud.datastream.v1.SalesforceFieldOrBuilder>
      getFieldsOrBuilderList() {
    return fields_;
  }
  /**
   *
   *
   * <pre>
   * Salesforce fields.
   * When unspecified as part of include objects,
   * includes everything, when unspecified as part of exclude objects,
   * excludes nothing.
   * </pre>
   *
   * <code>repeated .google.cloud.datastream.v1.SalesforceField fields = 2;</code>
   */
  @java.lang.Override
  public int getFieldsCount() {
    return fields_.size();
  }
  /**
   *
   *
   * <pre>
   * Salesforce fields.
   * When unspecified as part of include objects,
   * includes everything, when unspecified as part of exclude objects,
   * excludes nothing.
   * </pre>
   *
   * <code>repeated .google.cloud.datastream.v1.SalesforceField fields = 2;</code>
   */
  @java.lang.Override
  public com.google.cloud.datastream.v1.SalesforceField getFields(int index) {
    return fields_.get(index);
  }
  /**
   *
   *
   * <pre>
   * Salesforce fields.
   * When unspecified as part of include objects,
   * includes everything, when unspecified as part of exclude objects,
   * excludes nothing.
   * </pre>
   *
   * <code>repeated .google.cloud.datastream.v1.SalesforceField fields = 2;</code>
   */
  @java.lang.Override
  public com.google.cloud.datastream.v1.SalesforceFieldOrBuilder getFieldsOrBuilder(int index) {
    return fields_.get(index);
  }
  // Memoized result of isInitialized(): -1 unknown, 0 false, 1 true.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // proto3: skip fields at their default value (empty string / empty list).
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(objectName_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, objectName_);
    }
    for (int i = 0; i < fields_.size(); i++) {
      output.writeMessage(2, fields_.get(i));
    }
    getUnknownFields().writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    // Size is memoized; -1 means "not yet computed".
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(objectName_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, objectName_);
    }
    for (int i = 0; i < fields_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, fields_.get(i));
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.datastream.v1.SalesforceObject)) {
      return super.equals(obj);
    }
    com.google.cloud.datastream.v1.SalesforceObject other =
        (com.google.cloud.datastream.v1.SalesforceObject) obj;

    // Field-by-field comparison, including unknown fields.
    if (!getObjectName().equals(other.getObjectName())) return false;
    if (!getFieldsList().equals(other.getFieldsList())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    // Hash is memoized; 0 means "not yet computed" (messages are immutable).
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + OBJECT_NAME_FIELD_NUMBER;
    hash = (53 * hash) + getObjectName().hashCode();
    if (getFieldsCount() > 0) {
      hash = (37 * hash) + FIELDS_FIELD_NUMBER;
      hash = (53 * hash) + getFieldsList().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points; all delegate to PARSER or to the
  // GeneratedMessageV3 I/O helpers.
  public static com.google.cloud.datastream.v1.SalesforceObject parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.datastream.v1.SalesforceObject parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.datastream.v1.SalesforceObject parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.datastream.v1.SalesforceObject parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.datastream.v1.SalesforceObject parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.datastream.v1.SalesforceObject parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.datastream.v1.SalesforceObject parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.datastream.v1.SalesforceObject parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  // Delimited variants read a varint length prefix before the message bytes.
  public static com.google.cloud.datastream.v1.SalesforceObject parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.datastream.v1.SalesforceObject parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.datastream.v1.SalesforceObject parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.datastream.v1.SalesforceObject parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(com.google.cloud.datastream.v1.SalesforceObject prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    // Avoid a needless merge when converting the default instance.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Salesforce object.
* </pre>
*
* Protobuf type {@code google.cloud.datastream.v1.SalesforceObject}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.datastream.v1.SalesforceObject)
com.google.cloud.datastream.v1.SalesforceObjectOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.datastream.v1.DatastreamResourcesProto
          .internal_static_google_cloud_datastream_v1_SalesforceObject_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.datastream.v1.DatastreamResourcesProto
          .internal_static_google_cloud_datastream_v1_SalesforceObject_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.datastream.v1.SalesforceObject.class,
              com.google.cloud.datastream.v1.SalesforceObject.Builder.class);
    }

    // Construct using com.google.cloud.datastream.v1.SalesforceObject.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      objectName_ = "";
      // Repeated field: drop the plain list or reset the field builder,
      // whichever storage mode is active.
      if (fieldsBuilder_ == null) {
        fields_ = java.util.Collections.emptyList();
      } else {
        fields_ = null;
        fieldsBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000002);
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.datastream.v1.DatastreamResourcesProto
          .internal_static_google_cloud_datastream_v1_SalesforceObject_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.datastream.v1.SalesforceObject getDefaultInstanceForType() {
      return com.google.cloud.datastream.v1.SalesforceObject.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.datastream.v1.SalesforceObject build() {
      com.google.cloud.datastream.v1.SalesforceObject result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.datastream.v1.SalesforceObject buildPartial() {
      com.google.cloud.datastream.v1.SalesforceObject result =
          new com.google.cloud.datastream.v1.SalesforceObject(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Transfers the repeated `fields` into the built message; when the builder
    // still owns a mutable list it is frozen (unmodifiable) first.
    private void buildPartialRepeatedFields(
        com.google.cloud.datastream.v1.SalesforceObject result) {
      if (fieldsBuilder_ == null) {
        if (((bitField0_ & 0x00000002) != 0)) {
          fields_ = java.util.Collections.unmodifiableList(fields_);
          bitField0_ = (bitField0_ & ~0x00000002);
        }
        result.fields_ = fields_;
      } else {
        result.fields_ = fieldsBuilder_.build();
      }
    }

    // Copies only the singular fields whose presence bit is set.
    private void buildPartial0(com.google.cloud.datastream.v1.SalesforceObject result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.objectName_ = objectName_;
      }
    }
    // Reflective/field-descriptor overrides: retained only to narrow the
    // return type to this Builder for fluent chaining.
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.datastream.v1.SalesforceObject) {
        return mergeFrom((com.google.cloud.datastream.v1.SalesforceObject) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.cloud.datastream.v1.SalesforceObject other) {
      if (other == com.google.cloud.datastream.v1.SalesforceObject.getDefaultInstance())
        return this;
      if (!other.getObjectName().isEmpty()) {
        objectName_ = other.objectName_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      // Repeated-field merge: appends `other`'s elements; when our list is
      // empty we can share `other`'s immutable list directly.
      if (fieldsBuilder_ == null) {
        if (!other.fields_.isEmpty()) {
          if (fields_.isEmpty()) {
            fields_ = other.fields_;
            bitField0_ = (bitField0_ & ~0x00000002);
          } else {
            ensureFieldsIsMutable();
            fields_.addAll(other.fields_);
          }
          onChanged();
        }
      } else {
        if (!other.fields_.isEmpty()) {
          if (fieldsBuilder_.isEmpty()) {
            // Drop the empty builder and adopt the incoming list; recreate the
            // builder only if this generated runtime always uses field builders.
            fieldsBuilder_.dispose();
            fieldsBuilder_ = null;
            fields_ = other.fields_;
            bitField0_ = (bitField0_ & ~0x00000002);
            fieldsBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getFieldsFieldBuilder()
                    : null;
          } else {
            fieldsBuilder_.addAllMessages(other.fields_);
          }
        }
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          // Dispatch on the wire tag: (field_number << 3) | wire_type.
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                objectName_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                com.google.cloud.datastream.v1.SalesforceField m =
                    input.readMessage(
                        com.google.cloud.datastream.v1.SalesforceField.parser(), extensionRegistry);
                if (fieldsBuilder_ == null) {
                  ensureFieldsIsMutable();
                  fields_.add(m);
                } else {
                  fieldsBuilder_.addMessage(m);
                }
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Presence bits: 0x00000001 = object_name set, 0x00000002 = fields mutable.
    private int bitField0_;

    // Holds either a String or a ByteString; getters lazily convert and cache.
    private java.lang.Object objectName_ = "";
    /**
     *
     *
     * <pre>
     * Object name.
     * </pre>
     *
     * <code>string object_name = 1;</code>
     *
     * @return The objectName.
     */
    public java.lang.String getObjectName() {
      java.lang.Object ref = objectName_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        objectName_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Object name.
     * </pre>
     *
     * <code>string object_name = 1;</code>
     *
     * @return The bytes for objectName.
     */
    public com.google.protobuf.ByteString getObjectNameBytes() {
      java.lang.Object ref = objectName_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        objectName_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Object name.
     * </pre>
     *
     * <code>string object_name = 1;</code>
     *
     * @param value The objectName to set.
     * @return This builder for chaining.
     */
    public Builder setObjectName(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      objectName_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Object name.
     * </pre>
     *
     * <code>string object_name = 1;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearObjectName() {
      objectName_ = getDefaultInstance().getObjectName();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Object name.
     * </pre>
     *
     * <code>string object_name = 1;</code>
     *
     * @param value The bytes for objectName to set.
     * @return This builder for chaining.
     */
    public Builder setObjectNameBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      // proto3 strings must be valid UTF-8; reject invalid bytes up front.
      checkByteStringIsUtf8(value);
      objectName_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
private java.util.List<com.google.cloud.datastream.v1.SalesforceField> fields_ =
java.util.Collections.emptyList();
private void ensureFieldsIsMutable() {
if (!((bitField0_ & 0x00000002) != 0)) {
fields_ = new java.util.ArrayList<com.google.cloud.datastream.v1.SalesforceField>(fields_);
bitField0_ |= 0x00000002;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.datastream.v1.SalesforceField,
com.google.cloud.datastream.v1.SalesforceField.Builder,
com.google.cloud.datastream.v1.SalesforceFieldOrBuilder>
fieldsBuilder_;
/**
*
*
* <pre>
* Salesforce fields.
* When unspecified as part of include objects,
* includes everything, when unspecified as part of exclude objects,
* excludes nothing.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1.SalesforceField fields = 2;</code>
*/
public java.util.List<com.google.cloud.datastream.v1.SalesforceField> getFieldsList() {
if (fieldsBuilder_ == null) {
return java.util.Collections.unmodifiableList(fields_);
} else {
return fieldsBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* Salesforce fields.
* When unspecified as part of include objects,
* includes everything, when unspecified as part of exclude objects,
* excludes nothing.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1.SalesforceField fields = 2;</code>
*/
public int getFieldsCount() {
if (fieldsBuilder_ == null) {
return fields_.size();
} else {
return fieldsBuilder_.getCount();
}
}
/**
*
*
* <pre>
* Salesforce fields.
* When unspecified as part of include objects,
* includes everything, when unspecified as part of exclude objects,
* excludes nothing.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1.SalesforceField fields = 2;</code>
*/
public com.google.cloud.datastream.v1.SalesforceField getFields(int index) {
if (fieldsBuilder_ == null) {
return fields_.get(index);
} else {
return fieldsBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* Salesforce fields.
* When unspecified as part of include objects,
* includes everything, when unspecified as part of exclude objects,
* excludes nothing.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1.SalesforceField fields = 2;</code>
*/
public Builder setFields(int index, com.google.cloud.datastream.v1.SalesforceField value) {
if (fieldsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureFieldsIsMutable();
fields_.set(index, value);
onChanged();
} else {
fieldsBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* Salesforce fields.
* When unspecified as part of include objects,
* includes everything, when unspecified as part of exclude objects,
* excludes nothing.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1.SalesforceField fields = 2;</code>
*/
public Builder setFields(
int index, com.google.cloud.datastream.v1.SalesforceField.Builder builderForValue) {
if (fieldsBuilder_ == null) {
ensureFieldsIsMutable();
fields_.set(index, builderForValue.build());
onChanged();
} else {
fieldsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Salesforce fields.
* When unspecified as part of include objects,
* includes everything, when unspecified as part of exclude objects,
* excludes nothing.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1.SalesforceField fields = 2;</code>
*/
public Builder addFields(com.google.cloud.datastream.v1.SalesforceField value) {
if (fieldsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureFieldsIsMutable();
fields_.add(value);
onChanged();
} else {
fieldsBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* Salesforce fields.
* When unspecified as part of include objects,
* includes everything, when unspecified as part of exclude objects,
* excludes nothing.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1.SalesforceField fields = 2;</code>
*/
public Builder addFields(int index, com.google.cloud.datastream.v1.SalesforceField value) {
if (fieldsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureFieldsIsMutable();
fields_.add(index, value);
onChanged();
} else {
fieldsBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* Salesforce fields.
* When unspecified as part of include objects,
* includes everything, when unspecified as part of exclude objects,
* excludes nothing.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1.SalesforceField fields = 2;</code>
*/
public Builder addFields(
com.google.cloud.datastream.v1.SalesforceField.Builder builderForValue) {
if (fieldsBuilder_ == null) {
ensureFieldsIsMutable();
fields_.add(builderForValue.build());
onChanged();
} else {
fieldsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Salesforce fields.
* When unspecified as part of include objects,
* includes everything, when unspecified as part of exclude objects,
* excludes nothing.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1.SalesforceField fields = 2;</code>
*/
public Builder addFields(
int index, com.google.cloud.datastream.v1.SalesforceField.Builder builderForValue) {
if (fieldsBuilder_ == null) {
ensureFieldsIsMutable();
fields_.add(index, builderForValue.build());
onChanged();
} else {
fieldsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Salesforce fields.
* When unspecified as part of include objects,
* includes everything, when unspecified as part of exclude objects,
* excludes nothing.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1.SalesforceField fields = 2;</code>
*/
    // Appends every element of `values` to the repeated `fields`. With no
    // field builder attached, the local list is first made mutable and
    // AbstractMessageLite.Builder.addAll copies the elements in; otherwise
    // the call is forwarded to the attached builder.
    public Builder addAllFields(
        java.lang.Iterable<? extends com.google.cloud.datastream.v1.SalesforceField> values) {
      if (fieldsBuilder_ == null) {
        ensureFieldsIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, fields_);
        onChanged();
      } else {
        fieldsBuilder_.addAllMessages(values);
      }
      return this;
    }
/**
*
*
* <pre>
* Salesforce fields.
* When unspecified as part of include objects,
* includes everything, when unspecified as part of exclude objects,
* excludes nothing.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1.SalesforceField fields = 2;</code>
*/
public Builder clearFields() {
if (fieldsBuilder_ == null) {
fields_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
} else {
fieldsBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* Salesforce fields.
* When unspecified as part of include objects,
* includes everything, when unspecified as part of exclude objects,
* excludes nothing.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1.SalesforceField fields = 2;</code>
*/
public Builder removeFields(int index) {
if (fieldsBuilder_ == null) {
ensureFieldsIsMutable();
fields_.remove(index);
onChanged();
} else {
fieldsBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* Salesforce fields.
* When unspecified as part of include objects,
* includes everything, when unspecified as part of exclude objects,
* excludes nothing.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1.SalesforceField fields = 2;</code>
*/
public com.google.cloud.datastream.v1.SalesforceField.Builder getFieldsBuilder(int index) {
return getFieldsFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* Salesforce fields.
* When unspecified as part of include objects,
* includes everything, when unspecified as part of exclude objects,
* excludes nothing.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1.SalesforceField fields = 2;</code>
*/
public com.google.cloud.datastream.v1.SalesforceFieldOrBuilder getFieldsOrBuilder(int index) {
if (fieldsBuilder_ == null) {
return fields_.get(index);
} else {
return fieldsBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* Salesforce fields.
* When unspecified as part of include objects,
* includes everything, when unspecified as part of exclude objects,
* excludes nothing.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1.SalesforceField fields = 2;</code>
*/
public java.util.List<? extends com.google.cloud.datastream.v1.SalesforceFieldOrBuilder>
getFieldsOrBuilderList() {
if (fieldsBuilder_ != null) {
return fieldsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(fields_);
}
}
/**
*
*
* <pre>
* Salesforce fields.
* When unspecified as part of include objects,
* includes everything, when unspecified as part of exclude objects,
* excludes nothing.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1.SalesforceField fields = 2;</code>
*/
public com.google.cloud.datastream.v1.SalesforceField.Builder addFieldsBuilder() {
return getFieldsFieldBuilder()
.addBuilder(com.google.cloud.datastream.v1.SalesforceField.getDefaultInstance());
}
/**
*
*
* <pre>
* Salesforce fields.
* When unspecified as part of include objects,
* includes everything, when unspecified as part of exclude objects,
* excludes nothing.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1.SalesforceField fields = 2;</code>
*/
public com.google.cloud.datastream.v1.SalesforceField.Builder addFieldsBuilder(int index) {
return getFieldsFieldBuilder()
.addBuilder(index, com.google.cloud.datastream.v1.SalesforceField.getDefaultInstance());
}
/**
*
*
* <pre>
* Salesforce fields.
* When unspecified as part of include objects,
* includes everything, when unspecified as part of exclude objects,
* excludes nothing.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1.SalesforceField fields = 2;</code>
*/
public java.util.List<com.google.cloud.datastream.v1.SalesforceField.Builder>
getFieldsBuilderList() {
return getFieldsFieldBuilder().getBuilderList();
}
    // Lazily constructs the repeated-field builder for `fields`. On first use
    // the current list (and its mutability bit 0x00000002) is handed to the
    // builder and the plain-list representation is dropped (fields_ = null);
    // from then on all access goes through fieldsBuilder_.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.datastream.v1.SalesforceField,
            com.google.cloud.datastream.v1.SalesforceField.Builder,
            com.google.cloud.datastream.v1.SalesforceFieldOrBuilder>
        getFieldsFieldBuilder() {
      if (fieldsBuilder_ == null) {
        fieldsBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.cloud.datastream.v1.SalesforceField,
                com.google.cloud.datastream.v1.SalesforceField.Builder,
                com.google.cloud.datastream.v1.SalesforceFieldOrBuilder>(
                fields_, ((bitField0_ & 0x00000002) != 0), getParentForChildren(), isClean());
        fields_ = null;
      }
      return fieldsBuilder_;
    }
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.datastream.v1.SalesforceObject)
}
// @@protoc_insertion_point(class_scope:google.cloud.datastream.v1.SalesforceObject)
  // Singleton default instance; also serves as the prototype for newBuilder().
  private static final com.google.cloud.datastream.v1.SalesforceObject DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.datastream.v1.SalesforceObject();
  }
  public static com.google.cloud.datastream.v1.SalesforceObject getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Stream parser backing the static parseFrom helpers. On failure the
  // partially built message is attached to the exception via
  // setUnfinishedMessage so callers can inspect what was read.
  private static final com.google.protobuf.Parser<SalesforceObject> PARSER =
      new com.google.protobuf.AbstractParser<SalesforceObject>() {
        @java.lang.Override
        public SalesforceObject parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Plain I/O errors are wrapped so callers see one exception type.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<SalesforceObject> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<SalesforceObject> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.datastream.v1.SalesforceObject getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/dialogflow/v2beta1/agent.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.dialogflow.v2beta1;
/**
*
*
* <pre>
* The response message for
* [Agents.SearchAgents][google.cloud.dialogflow.v2beta1.Agents.SearchAgents].
* </pre>
*
* Protobuf type {@code google.cloud.dialogflow.v2beta1.SearchAgentsResponse}
*/
public final class SearchAgentsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.dialogflow.v2beta1.SearchAgentsResponse)
SearchAgentsResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use SearchAgentsResponse.newBuilder() to construct.
private SearchAgentsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private SearchAgentsResponse() {
agents_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new SearchAgentsResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.dialogflow.v2beta1.AgentProto
.internal_static_google_cloud_dialogflow_v2beta1_SearchAgentsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.dialogflow.v2beta1.AgentProto
.internal_static_google_cloud_dialogflow_v2beta1_SearchAgentsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.dialogflow.v2beta1.SearchAgentsResponse.class,
com.google.cloud.dialogflow.v2beta1.SearchAgentsResponse.Builder.class);
}
public static final int AGENTS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.dialogflow.v2beta1.Agent> agents_;
/**
*
*
* <pre>
* The list of agents. There will be a maximum number of items returned based
* on the page_size field in the request.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2beta1.Agent agents = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.dialogflow.v2beta1.Agent> getAgentsList() {
return agents_;
}
/**
*
*
* <pre>
* The list of agents. There will be a maximum number of items returned based
* on the page_size field in the request.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2beta1.Agent agents = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.dialogflow.v2beta1.AgentOrBuilder>
getAgentsOrBuilderList() {
return agents_;
}
/**
*
*
* <pre>
* The list of agents. There will be a maximum number of items returned based
* on the page_size field in the request.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2beta1.Agent agents = 1;</code>
*/
@java.lang.Override
public int getAgentsCount() {
return agents_.size();
}
/**
*
*
* <pre>
* The list of agents. There will be a maximum number of items returned based
* on the page_size field in the request.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2beta1.Agent agents = 1;</code>
*/
@java.lang.Override
public com.google.cloud.dialogflow.v2beta1.Agent getAgents(int index) {
return agents_.get(index);
}
/**
*
*
* <pre>
* The list of agents. There will be a maximum number of items returned based
* on the page_size field in the request.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2beta1.Agent agents = 1;</code>
*/
@java.lang.Override
public com.google.cloud.dialogflow.v2beta1.AgentOrBuilder getAgentsOrBuilder(int index) {
return agents_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* Token to retrieve the next page of results, or empty if there are no
* more results in the list.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
  // Returns the token. The field may still hold the raw ByteString from
  // parsing; in that case it is decoded as UTF-8 once and the String is
  // cached back into nextPageToken_ for subsequent calls.
  @java.lang.Override
  public java.lang.String getNextPageToken() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      nextPageToken_ = s;
      return s;
    }
  }
/**
*
*
* <pre>
* Token to retrieve the next page of results, or empty if there are no
* more results in the list.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
  // -1 = not yet computed, 0 = not initialized, 1 = initialized. This message
  // declares no required fields, so initialization always succeeds.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  // Writes fields in tag order: repeated agents (field 1), next_page_token
  // (field 2, omitted when empty), then any unknown fields carried through
  // from parsing.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    for (int i = 0; i < agents_.size(); i++) {
      output.writeMessage(1, agents_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
    }
    getUnknownFields().writeTo(output);
  }
  // Byte size of the serialized form, memoized in memoizedSize (-1 = unset).
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    for (int i = 0; i < agents_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, agents_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Field-by-field equality, including unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.dialogflow.v2beta1.SearchAgentsResponse)) {
      return super.equals(obj);
    }
    com.google.cloud.dialogflow.v2beta1.SearchAgentsResponse other =
        (com.google.cloud.dialogflow.v2beta1.SearchAgentsResponse) obj;
    if (!getAgentsList().equals(other.getAgentsList())) return false;
    if (!getNextPageToken().equals(other.getNextPageToken())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Hash over descriptor plus set fields, memoized in memoizedHashCode
  // (0 = unset); agents contribute only when the list is non-empty.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getAgentsCount() > 0) {
      hash = (37 * hash) + AGENTS_FIELD_NUMBER;
      hash = (53 * hash) + getAgentsList().hashCode();
    }
    hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getNextPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
public static com.google.cloud.dialogflow.v2beta1.SearchAgentsResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dialogflow.v2beta1.SearchAgentsResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dialogflow.v2beta1.SearchAgentsResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dialogflow.v2beta1.SearchAgentsResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dialogflow.v2beta1.SearchAgentsResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dialogflow.v2beta1.SearchAgentsResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dialogflow.v2beta1.SearchAgentsResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.dialogflow.v2beta1.SearchAgentsResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.dialogflow.v2beta1.SearchAgentsResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.dialogflow.v2beta1.SearchAgentsResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.dialogflow.v2beta1.SearchAgentsResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.dialogflow.v2beta1.SearchAgentsResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.dialogflow.v2beta1.SearchAgentsResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* The response message for
* [Agents.SearchAgents][google.cloud.dialogflow.v2beta1.Agents.SearchAgents].
* </pre>
*
* Protobuf type {@code google.cloud.dialogflow.v2beta1.SearchAgentsResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.dialogflow.v2beta1.SearchAgentsResponse)
com.google.cloud.dialogflow.v2beta1.SearchAgentsResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.dialogflow.v2beta1.AgentProto
.internal_static_google_cloud_dialogflow_v2beta1_SearchAgentsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.dialogflow.v2beta1.AgentProto
.internal_static_google_cloud_dialogflow_v2beta1_SearchAgentsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.dialogflow.v2beta1.SearchAgentsResponse.class,
com.google.cloud.dialogflow.v2beta1.SearchAgentsResponse.Builder.class);
}
// Construct using com.google.cloud.dialogflow.v2beta1.SearchAgentsResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
    // Resets every field to its default and clears all presence bits.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (agentsBuilder_ == null) {
        agents_ = java.util.Collections.emptyList();
      } else {
        // Keep the attached builder but drop its contents; the plain list is
        // unused while a builder exists.
        agents_ = null;
        agentsBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      nextPageToken_ = "";
      return this;
    }
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.dialogflow.v2beta1.AgentProto
.internal_static_google_cloud_dialogflow_v2beta1_SearchAgentsResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.dialogflow.v2beta1.SearchAgentsResponse getDefaultInstanceForType() {
return com.google.cloud.dialogflow.v2beta1.SearchAgentsResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.dialogflow.v2beta1.SearchAgentsResponse build() {
com.google.cloud.dialogflow.v2beta1.SearchAgentsResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
    // Builds the message without the isInitialized() check done by build().
    @java.lang.Override
    public com.google.cloud.dialogflow.v2beta1.SearchAgentsResponse buildPartial() {
      com.google.cloud.dialogflow.v2beta1.SearchAgentsResponse result =
          new com.google.cloud.dialogflow.v2beta1.SearchAgentsResponse(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Hands the repeated `agents` to `result`; the local list is frozen
    // (wrapped unmodifiable, mutability bit cleared) the first time it is
    // given to a message so later builder mutations copy-on-write.
    private void buildPartialRepeatedFields(
        com.google.cloud.dialogflow.v2beta1.SearchAgentsResponse result) {
      if (agentsBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)) {
          agents_ = java.util.Collections.unmodifiableList(agents_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.agents_ = agents_;
      } else {
        result.agents_ = agentsBuilder_.build();
      }
    }
    // Copies singular fields whose presence bits are set.
    private void buildPartial0(com.google.cloud.dialogflow.v2beta1.SearchAgentsResponse result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.nextPageToken_ = nextPageToken_;
      }
    }
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.dialogflow.v2beta1.SearchAgentsResponse) {
return mergeFrom((com.google.cloud.dialogflow.v2beta1.SearchAgentsResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
    // Merges all set fields of `other` into this builder: repeated `agents`
    // are appended, and a non-empty next_page_token in `other` replaces ours.
    public Builder mergeFrom(com.google.cloud.dialogflow.v2beta1.SearchAgentsResponse other) {
      if (other == com.google.cloud.dialogflow.v2beta1.SearchAgentsResponse.getDefaultInstance())
        return this;
      if (agentsBuilder_ == null) {
        if (!other.agents_.isEmpty()) {
          if (agents_.isEmpty()) {
            // Adopt other's list directly (built messages hold an
            // unmodifiable list); copy-on-write happens on later mutation.
            agents_ = other.agents_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureAgentsIsMutable();
            agents_.addAll(other.agents_);
          }
          onChanged();
        }
      } else {
        if (!other.agents_.isEmpty()) {
          if (agentsBuilder_.isEmpty()) {
            // Discard the empty builder and adopt other's list, re-wrapping
            // it in a field builder only when the runtime always uses them.
            agentsBuilder_.dispose();
            agentsBuilder_ = null;
            agents_ = other.agents_;
            bitField0_ = (bitField0_ & ~0x00000001);
            agentsBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getAgentsFieldBuilder()
                    : null;
          } else {
            agentsBuilder_.addAllMessages(other.agents_);
          }
        }
      }
      if (!other.getNextPageToken().isEmpty()) {
        nextPageToken_ = other.nextPageToken_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
@java.lang.Override
public final boolean isInitialized() {
return true;
}
    // Parses fields from `input` until EOF (tag 0) or an end-group tag,
    // merging into this builder. Unrecognized tags are preserved via
    // parseUnknownField. onChanged() fires in finally even on parse failure.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10: // field 1 (agents), wire type 2: length-delimited message
              {
                com.google.cloud.dialogflow.v2beta1.Agent m =
                    input.readMessage(
                        com.google.cloud.dialogflow.v2beta1.Agent.parser(), extensionRegistry);
                if (agentsBuilder_ == null) {
                  ensureAgentsIsMutable();
                  agents_.add(m);
                } else {
                  agentsBuilder_.addMessage(m);
                }
                break;
              } // case 10
            case 18: // field 2 (next_page_token), wire type 2: UTF-8 string
              {
                nextPageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
private int bitField0_;
private java.util.List<com.google.cloud.dialogflow.v2beta1.Agent> agents_ =
java.util.Collections.emptyList();
    // Copy-on-write guard: bit 0x00000001 records that agents_ is a private
    // mutable copy; until it is set, the first mutation copies the (possibly
    // shared/unmodifiable) list into a fresh ArrayList.
    private void ensureAgentsIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        agents_ = new java.util.ArrayList<com.google.cloud.dialogflow.v2beta1.Agent>(agents_);
        bitField0_ |= 0x00000001;
      }
    }
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.dialogflow.v2beta1.Agent,
com.google.cloud.dialogflow.v2beta1.Agent.Builder,
com.google.cloud.dialogflow.v2beta1.AgentOrBuilder>
agentsBuilder_;
/**
*
*
* <pre>
* The list of agents. There will be a maximum number of items returned based
* on the page_size field in the request.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2beta1.Agent agents = 1;</code>
*/
public java.util.List<com.google.cloud.dialogflow.v2beta1.Agent> getAgentsList() {
if (agentsBuilder_ == null) {
return java.util.Collections.unmodifiableList(agents_);
} else {
return agentsBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* The list of agents. There will be a maximum number of items returned based
* on the page_size field in the request.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2beta1.Agent agents = 1;</code>
*/
public int getAgentsCount() {
if (agentsBuilder_ == null) {
return agents_.size();
} else {
return agentsBuilder_.getCount();
}
}
/**
*
*
* <pre>
* The list of agents. There will be a maximum number of items returned based
* on the page_size field in the request.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2beta1.Agent agents = 1;</code>
*/
public com.google.cloud.dialogflow.v2beta1.Agent getAgents(int index) {
if (agentsBuilder_ == null) {
return agents_.get(index);
} else {
return agentsBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* The list of agents. There will be a maximum number of items returned based
* on the page_size field in the request.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2beta1.Agent agents = 1;</code>
*/
public Builder setAgents(int index, com.google.cloud.dialogflow.v2beta1.Agent value) {
if (agentsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureAgentsIsMutable();
agents_.set(index, value);
onChanged();
} else {
agentsBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The list of agents. There will be a maximum number of items returned based
* on the page_size field in the request.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2beta1.Agent agents = 1;</code>
*/
public Builder setAgents(
int index, com.google.cloud.dialogflow.v2beta1.Agent.Builder builderForValue) {
if (agentsBuilder_ == null) {
ensureAgentsIsMutable();
agents_.set(index, builderForValue.build());
onChanged();
} else {
agentsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of agents. There will be a maximum number of items returned based
* on the page_size field in the request.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2beta1.Agent agents = 1;</code>
*/
public Builder addAgents(com.google.cloud.dialogflow.v2beta1.Agent value) {
if (agentsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureAgentsIsMutable();
agents_.add(value);
onChanged();
} else {
agentsBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* The list of agents. There will be a maximum number of items returned based
* on the page_size field in the request.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2beta1.Agent agents = 1;</code>
*/
public Builder addAgents(int index, com.google.cloud.dialogflow.v2beta1.Agent value) {
if (agentsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureAgentsIsMutable();
agents_.add(index, value);
onChanged();
} else {
agentsBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The list of agents. There will be a maximum number of items returned based
* on the page_size field in the request.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2beta1.Agent agents = 1;</code>
*/
public Builder addAgents(com.google.cloud.dialogflow.v2beta1.Agent.Builder builderForValue) {
if (agentsBuilder_ == null) {
ensureAgentsIsMutable();
agents_.add(builderForValue.build());
onChanged();
} else {
agentsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of agents. There will be a maximum number of items returned based
* on the page_size field in the request.
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2beta1.Agent agents = 1;</code>
*/
    // Generated repeated-field plumbing for `agents` (field 1). Every mutator follows the
    // same pattern: while agentsBuilder_ is null the Builder mutates the plain backing
    // list directly; once getAgentsFieldBuilder() has been called, all operations are
    // delegated to the RepeatedFieldBuilderV3, which owns the list from then on.
    public Builder addAgents(
        int index, com.google.cloud.dialogflow.v2beta1.Agent.Builder builderForValue) {
      if (agentsBuilder_ == null) {
        ensureAgentsIsMutable();
        agents_.add(index, builderForValue.build());
        onChanged();
      } else {
        agentsBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of agents. There will be a maximum number of items returned based
     * on the page_size field in the request.
     * </pre>
     *
     * <code>repeated .google.cloud.dialogflow.v2beta1.Agent agents = 1;</code>
     */
    public Builder addAllAgents(
        java.lang.Iterable<? extends com.google.cloud.dialogflow.v2beta1.Agent> values) {
      if (agentsBuilder_ == null) {
        ensureAgentsIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, agents_);
        onChanged();
      } else {
        agentsBuilder_.addAllMessages(values);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of agents. There will be a maximum number of items returned based
     * on the page_size field in the request.
     * </pre>
     *
     * <code>repeated .google.cloud.dialogflow.v2beta1.Agent agents = 1;</code>
     */
    public Builder clearAgents() {
      if (agentsBuilder_ == null) {
        agents_ = java.util.Collections.emptyList();
        // Clearing the has-bit marks the list as "not locally owned" again.
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        agentsBuilder_.clear();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of agents. There will be a maximum number of items returned based
     * on the page_size field in the request.
     * </pre>
     *
     * <code>repeated .google.cloud.dialogflow.v2beta1.Agent agents = 1;</code>
     */
    public Builder removeAgents(int index) {
      if (agentsBuilder_ == null) {
        ensureAgentsIsMutable();
        agents_.remove(index);
        onChanged();
      } else {
        agentsBuilder_.remove(index);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of agents. There will be a maximum number of items returned based
     * on the page_size field in the request.
     * </pre>
     *
     * <code>repeated .google.cloud.dialogflow.v2beta1.Agent agents = 1;</code>
     */
    public com.google.cloud.dialogflow.v2beta1.Agent.Builder getAgentsBuilder(int index) {
      // Forces creation of the field builder; subsequent mutations go through it.
      return getAgentsFieldBuilder().getBuilder(index);
    }
    /**
     *
     *
     * <pre>
     * The list of agents. There will be a maximum number of items returned based
     * on the page_size field in the request.
     * </pre>
     *
     * <code>repeated .google.cloud.dialogflow.v2beta1.Agent agents = 1;</code>
     */
    public com.google.cloud.dialogflow.v2beta1.AgentOrBuilder getAgentsOrBuilder(int index) {
      if (agentsBuilder_ == null) {
        return agents_.get(index);
      } else {
        return agentsBuilder_.getMessageOrBuilder(index);
      }
    }
    /**
     *
     *
     * <pre>
     * The list of agents. There will be a maximum number of items returned based
     * on the page_size field in the request.
     * </pre>
     *
     * <code>repeated .google.cloud.dialogflow.v2beta1.Agent agents = 1;</code>
     */
    public java.util.List<? extends com.google.cloud.dialogflow.v2beta1.AgentOrBuilder>
        getAgentsOrBuilderList() {
      if (agentsBuilder_ != null) {
        return agentsBuilder_.getMessageOrBuilderList();
      } else {
        // Read-only view so callers cannot bypass onChanged() bookkeeping.
        return java.util.Collections.unmodifiableList(agents_);
      }
    }
    /**
     *
     *
     * <pre>
     * The list of agents. There will be a maximum number of items returned based
     * on the page_size field in the request.
     * </pre>
     *
     * <code>repeated .google.cloud.dialogflow.v2beta1.Agent agents = 1;</code>
     */
    public com.google.cloud.dialogflow.v2beta1.Agent.Builder addAgentsBuilder() {
      return getAgentsFieldBuilder()
          .addBuilder(com.google.cloud.dialogflow.v2beta1.Agent.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * The list of agents. There will be a maximum number of items returned based
     * on the page_size field in the request.
     * </pre>
     *
     * <code>repeated .google.cloud.dialogflow.v2beta1.Agent agents = 1;</code>
     */
    public com.google.cloud.dialogflow.v2beta1.Agent.Builder addAgentsBuilder(int index) {
      return getAgentsFieldBuilder()
          .addBuilder(index, com.google.cloud.dialogflow.v2beta1.Agent.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * The list of agents. There will be a maximum number of items returned based
     * on the page_size field in the request.
     * </pre>
     *
     * <code>repeated .google.cloud.dialogflow.v2beta1.Agent agents = 1;</code>
     */
    public java.util.List<com.google.cloud.dialogflow.v2beta1.Agent.Builder>
        getAgentsBuilderList() {
      return getAgentsFieldBuilder().getBuilderList();
    }
    // Lazily creates the RepeatedFieldBuilderV3 and hands it ownership of agents_;
    // the local list reference is nulled so all later access is routed through it.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.dialogflow.v2beta1.Agent,
            com.google.cloud.dialogflow.v2beta1.Agent.Builder,
            com.google.cloud.dialogflow.v2beta1.AgentOrBuilder>
        getAgentsFieldBuilder() {
      if (agentsBuilder_ == null) {
        agentsBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.cloud.dialogflow.v2beta1.Agent,
                com.google.cloud.dialogflow.v2beta1.Agent.Builder,
                com.google.cloud.dialogflow.v2beta1.AgentOrBuilder>(
                agents_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
        agents_ = null;
      }
      return agentsBuilder_;
    }
    // Holds either a java.lang.String or a ByteString; the getters lazily convert and
    // cache the requested representation (standard protobuf string-field memoization).
    private java.lang.Object nextPageToken_ = "";
    /**
     *
     *
     * <pre>
     * Token to retrieve the next page of results, or empty if there are no
     * more results in the list.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The nextPageToken.
     */
    public java.lang.String getNextPageToken() {
      java.lang.Object ref = nextPageToken_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        nextPageToken_ = s; // cache decoded form for subsequent calls
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Token to retrieve the next page of results, or empty if there are no
     * more results in the list.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The bytes for nextPageToken.
     */
    public com.google.protobuf.ByteString getNextPageTokenBytes() {
      java.lang.Object ref = nextPageToken_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        nextPageToken_ = b; // cache encoded form for subsequent calls
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Token to retrieve the next page of results, or empty if there are no
     * more results in the list.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Token to retrieve the next page of results, or empty if there are no
     * more results in the list.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearNextPageToken() {
      nextPageToken_ = getDefaultInstance().getNextPageToken();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Token to retrieve the next page of results, or empty if there are no
     * more results in the list.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The bytes for nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value); // proto3 strings must be valid UTF-8
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    // Final pass-throughs to the base Builder; declared final so subclass generators
    // cannot alter unknown-field handling.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.dialogflow.v2beta1.SearchAgentsResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.dialogflow.v2beta1.SearchAgentsResponse)
  // Singleton default instance plus the shared parser — boilerplate emitted for every
  // generated message type.
  private static final com.google.cloud.dialogflow.v2beta1.SearchAgentsResponse DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.dialogflow.v2beta1.SearchAgentsResponse();
  }
  public static com.google.cloud.dialogflow.v2beta1.SearchAgentsResponse getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  private static final com.google.protobuf.Parser<SearchAgentsResponse> PARSER =
      new com.google.protobuf.AbstractParser<SearchAgentsResponse>() {
        @java.lang.Override
        public SearchAgentsResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            // Attach whatever was parsed so far so callers can inspect partial data.
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<SearchAgentsResponse> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<SearchAgentsResponse> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.dialogflow.v2beta1.SearchAgentsResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/compute/v1/compute.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.compute.v1;
/**
*
*
* <pre>
* A request message for Instances.GetEffectiveFirewalls. See the method description for details.
* </pre>
*
* Protobuf type {@code google.cloud.compute.v1.GetEffectiveFirewallsInstanceRequest}
*/
public final class GetEffectiveFirewallsInstanceRequest
extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.compute.v1.GetEffectiveFirewallsInstanceRequest)
GetEffectiveFirewallsInstanceRequestOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use GetEffectiveFirewallsInstanceRequest.newBuilder() to construct.
  private GetEffectiveFirewallsInstanceRequest(
      com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // No-arg constructor used only for the default instance; all string fields start empty.
  private GetEffectiveFirewallsInstanceRequest() {
    instance_ = "";
    networkInterface_ = "";
    project_ = "";
    zone_ = "";
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new GetEffectiveFirewallsInstanceRequest();
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.compute.v1.Compute
        .internal_static_google_cloud_compute_v1_GetEffectiveFirewallsInstanceRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.compute.v1.Compute
        .internal_static_google_cloud_compute_v1_GetEffectiveFirewallsInstanceRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.compute.v1.GetEffectiveFirewallsInstanceRequest.class,
            com.google.cloud.compute.v1.GetEffectiveFirewallsInstanceRequest.Builder.class);
  }
  public static final int INSTANCE_FIELD_NUMBER = 18257045;

  // Holds a String or a ByteString; getters lazily convert and cache (standard
  // protobuf string-field memoization). The Compute API uses large field numbers.
  @SuppressWarnings("serial")
  private volatile java.lang.Object instance_ = "";

  /**
   *
   *
   * <pre>
   * Name of the instance scoping this request.
   * </pre>
   *
   * <code>string instance = 18257045 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The instance.
   */
  @java.lang.Override
  public java.lang.String getInstance() {
    java.lang.Object ref = instance_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      instance_ = s; // cache decoded form
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * Name of the instance scoping this request.
   * </pre>
   *
   * <code>string instance = 18257045 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The bytes for instance.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getInstanceBytes() {
    java.lang.Object ref = instance_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      instance_ = b; // cache encoded form
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int NETWORK_INTERFACE_FIELD_NUMBER = 365387880;

  // String/ByteString dual representation with lazy conversion, as for instance_.
  @SuppressWarnings("serial")
  private volatile java.lang.Object networkInterface_ = "";

  /**
   *
   *
   * <pre>
   * The name of the network interface to get the effective firewalls.
   * </pre>
   *
   * <code>string network_interface = 365387880 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The networkInterface.
   */
  @java.lang.Override
  public java.lang.String getNetworkInterface() {
    java.lang.Object ref = networkInterface_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      networkInterface_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * The name of the network interface to get the effective firewalls.
   * </pre>
   *
   * <code>string network_interface = 365387880 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The bytes for networkInterface.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getNetworkInterfaceBytes() {
    java.lang.Object ref = networkInterface_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      networkInterface_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int PROJECT_FIELD_NUMBER = 227560217;

  // String/ByteString dual representation with lazy conversion, as for instance_.
  @SuppressWarnings("serial")
  private volatile java.lang.Object project_ = "";

  /**
   *
   *
   * <pre>
   * Project ID for this request.
   * </pre>
   *
   * <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The project.
   */
  @java.lang.Override
  public java.lang.String getProject() {
    java.lang.Object ref = project_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      project_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * Project ID for this request.
   * </pre>
   *
   * <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The bytes for project.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getProjectBytes() {
    java.lang.Object ref = project_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      project_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int ZONE_FIELD_NUMBER = 3744684;

  // String/ByteString dual representation with lazy conversion, as for instance_.
  @SuppressWarnings("serial")
  private volatile java.lang.Object zone_ = "";

  /**
   *
   *
   * <pre>
   * The name of the zone for this request.
   * </pre>
   *
   * <code>string zone = 3744684 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The zone.
   */
  @java.lang.Override
  public java.lang.String getZone() {
    java.lang.Object ref = zone_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      zone_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * The name of the zone for this request.
   * </pre>
   *
   * <code>string zone = 3744684 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The bytes for zone.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getZoneBytes() {
    java.lang.Object ref = zone_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      zone_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // Memoized: -1 = not computed, 0 = false, 1 = true. With no required proto2 fields
  // this message is always initialized.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes non-empty fields in ascending field-number order (zone has the
  // smallest number, networkInterface the largest); empty strings are omitted (proto3).
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(zone_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3744684, zone_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(instance_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 18257045, instance_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(project_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 227560217, project_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(networkInterface_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 365387880, networkInterface_);
    }
    getUnknownFields().writeTo(output);
  }
  // Computes and memoizes the wire size; mirrors writeTo() field-by-field.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(zone_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3744684, zone_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(instance_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(18257045, instance_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(project_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(227560217, project_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(networkInterface_)) {
      size +=
          com.google.protobuf.GeneratedMessageV3.computeStringSize(365387880, networkInterface_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Field-wise equality, including unknown fields, per the protobuf equals contract.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.compute.v1.GetEffectiveFirewallsInstanceRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.compute.v1.GetEffectiveFirewallsInstanceRequest other =
        (com.google.cloud.compute.v1.GetEffectiveFirewallsInstanceRequest) obj;

    if (!getInstance().equals(other.getInstance())) return false;
    if (!getNetworkInterface().equals(other.getNetworkInterface())) return false;
    if (!getProject().equals(other.getProject())) return false;
    if (!getZone().equals(other.getZone())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Memoized hash mixing each field number with its value hash; consistent with equals().
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + INSTANCE_FIELD_NUMBER;
    hash = (53 * hash) + getInstance().hashCode();
    hash = (37 * hash) + NETWORK_INTERFACE_FIELD_NUMBER;
    hash = (53 * hash) + getNetworkInterface().hashCode();
    hash = (37 * hash) + PROJECT_FIELD_NUMBER;
    hash = (53 * hash) + getProject().hashCode();
    hash = (37 * hash) + ZONE_FIELD_NUMBER;
    hash = (53 * hash) + getZone().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parseFrom overload family: byte sources delegate to PARSER,
  // stream sources go through GeneratedMessageV3 helpers that translate IO failures.
  public static com.google.cloud.compute.v1.GetEffectiveFirewallsInstanceRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.compute.v1.GetEffectiveFirewallsInstanceRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.compute.v1.GetEffectiveFirewallsInstanceRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.compute.v1.GetEffectiveFirewallsInstanceRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.compute.v1.GetEffectiveFirewallsInstanceRequest parseFrom(
      byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.compute.v1.GetEffectiveFirewallsInstanceRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.compute.v1.GetEffectiveFirewallsInstanceRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.compute.v1.GetEffectiveFirewallsInstanceRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  // Delimited variants read a leading varint length prefix before the message bytes.
  public static com.google.cloud.compute.v1.GetEffectiveFirewallsInstanceRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.compute.v1.GetEffectiveFirewallsInstanceRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.compute.v1.GetEffectiveFirewallsInstanceRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.compute.v1.GetEffectiveFirewallsInstanceRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Builder factory boilerplate: toBuilder() on the default instance avoids an
  // unnecessary mergeFrom when there is nothing to copy.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.cloud.compute.v1.GetEffectiveFirewallsInstanceRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* A request message for Instances.GetEffectiveFirewalls. See the method description for details.
* </pre>
*
* Protobuf type {@code google.cloud.compute.v1.GetEffectiveFirewallsInstanceRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.compute.v1.GetEffectiveFirewallsInstanceRequest)
com.google.cloud.compute.v1.GetEffectiveFirewallsInstanceRequestOrBuilder {
    // Builder-side descriptor plumbing, constructors, and full reset.
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.compute.v1.Compute
          .internal_static_google_cloud_compute_v1_GetEffectiveFirewallsInstanceRequest_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.compute.v1.Compute
          .internal_static_google_cloud_compute_v1_GetEffectiveFirewallsInstanceRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.compute.v1.GetEffectiveFirewallsInstanceRequest.class,
              com.google.cloud.compute.v1.GetEffectiveFirewallsInstanceRequest.Builder.class);
    }

    // Construct using com.google.cloud.compute.v1.GetEffectiveFirewallsInstanceRequest.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    // Resets every field and the set-bits mask to defaults.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      instance_ = "";
      networkInterface_ = "";
      project_ = "";
      zone_ = "";
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.compute.v1.Compute
          .internal_static_google_cloud_compute_v1_GetEffectiveFirewallsInstanceRequest_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.compute.v1.GetEffectiveFirewallsInstanceRequest
        getDefaultInstanceForType() {
      return com.google.cloud.compute.v1.GetEffectiveFirewallsInstanceRequest.getDefaultInstance();
    }

    // build() enforces initialization; buildPartial() does not (proto3 messages with no
    // required fields are always initialized, so the check never fires here).
    @java.lang.Override
    public com.google.cloud.compute.v1.GetEffectiveFirewallsInstanceRequest build() {
      com.google.cloud.compute.v1.GetEffectiveFirewallsInstanceRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.compute.v1.GetEffectiveFirewallsInstanceRequest buildPartial() {
      com.google.cloud.compute.v1.GetEffectiveFirewallsInstanceRequest result =
          new com.google.cloud.compute.v1.GetEffectiveFirewallsInstanceRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies only the fields whose set-bit is on, so untouched fields keep their defaults.
    private void buildPartial0(
        com.google.cloud.compute.v1.GetEffectiveFirewallsInstanceRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.instance_ = instance_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.networkInterface_ = networkInterface_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.project_ = project_;
      }
      if (((from_bitField0_ & 0x00000008) != 0)) {
        result.zone_ = zone_;
      }
    }
    // Reflection-based mutators: pure pass-throughs to GeneratedMessageV3.Builder,
    // re-declared so the fluent return type is this concrete Builder.
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    // Typed merge is used when possible; the generic overload falls back to the
    // reflective base implementation for foreign message types.
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.compute.v1.GetEffectiveFirewallsInstanceRequest) {
        return mergeFrom((com.google.cloud.compute.v1.GetEffectiveFirewallsInstanceRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Copies only non-empty fields from `other` (proto3 merge semantics), then
    // merges unknown fields.
    public Builder mergeFrom(
        com.google.cloud.compute.v1.GetEffectiveFirewallsInstanceRequest other) {
      if (other
          == com.google.cloud.compute.v1.GetEffectiveFirewallsInstanceRequest.getDefaultInstance())
        return this;
      if (!other.getInstance().isEmpty()) {
        instance_ = other.instance_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (!other.getNetworkInterface().isEmpty()) {
        networkInterface_ = other.networkInterface_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (!other.getProject().isEmpty()) {
        project_ = other.project_;
        bitField0_ |= 0x00000004;
        onChanged();
      }
      if (!other.getZone().isEmpty()) {
        zone_ = other.zone_;
        bitField0_ |= 0x00000008;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      // proto3 message with no required fields: always initialized.
      return true;
    }

    // Wire-format parse loop. Case labels are precomputed tags:
    // (field_number << 3) | wire_type, wire type 2 = length-delimited string.
    // e.g. zone: 3744684 << 3 | 2 = 29957474. networkInterface's tag overflows to a
    // negative int (-1371864254) because its field number is very large.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 29957474:
              {
                zone_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000008;
                break;
              } // case 29957474
            case 146056362:
              {
                instance_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 146056362
            case 1820481738:
              {
                project_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000004;
                break;
              } // case 1820481738
            case -1371864254:
              {
                networkInterface_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case -1371864254
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // One bit per field, in declaration order: 0x1 instance, 0x2 networkInterface,
    // 0x4 project, 0x8 zone. Consulted by buildPartial0().
    private int bitField0_;

    // String/ByteString dual representation with lazy conversion (see message class).
    private java.lang.Object instance_ = "";

    /**
     *
     *
     * <pre>
     * Name of the instance scoping this request.
     * </pre>
     *
     * <code>string instance = 18257045 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return The instance.
     */
    public java.lang.String getInstance() {
      java.lang.Object ref = instance_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        instance_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Name of the instance scoping this request.
     * </pre>
     *
     * <code>string instance = 18257045 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return The bytes for instance.
     */
    public com.google.protobuf.ByteString getInstanceBytes() {
      java.lang.Object ref = instance_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        instance_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Name of the instance scoping this request.
     * </pre>
     *
     * <code>string instance = 18257045 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @param value The instance to set.
     * @return This builder for chaining.
     */
    public Builder setInstance(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      instance_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Name of the instance scoping this request.
     * </pre>
     *
     * <code>string instance = 18257045 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearInstance() {
      instance_ = getDefaultInstance().getInstance();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Name of the instance scoping this request.
     * </pre>
     *
     * <code>string instance = 18257045 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @param value The bytes for instance to set.
     * @return This builder for chaining.
     */
    public Builder setInstanceBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value); // proto3 strings must be valid UTF-8
      instance_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    // String/ByteString dual representation with lazy conversion (see message class).
    private java.lang.Object networkInterface_ = "";

    /**
     *
     *
     * <pre>
     * The name of the network interface to get the effective firewalls.
     * </pre>
     *
     * <code>string network_interface = 365387880 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return The networkInterface.
     */
    public java.lang.String getNetworkInterface() {
      java.lang.Object ref = networkInterface_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        networkInterface_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * The name of the network interface to get the effective firewalls.
     * </pre>
     *
     * <code>string network_interface = 365387880 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return The bytes for networkInterface.
     */
    public com.google.protobuf.ByteString getNetworkInterfaceBytes() {
      java.lang.Object ref = networkInterface_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        networkInterface_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * The name of the network interface to get the effective firewalls.
     * </pre>
     *
     * <code>string network_interface = 365387880 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @param value The networkInterface to set.
     * @return This builder for chaining.
     */
    public Builder setNetworkInterface(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      networkInterface_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * The name of the network interface to get the effective firewalls.
     * </pre>
     *
     * <code>string network_interface = 365387880 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearNetworkInterface() {
      networkInterface_ = getDefaultInstance().getNetworkInterface();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * The name of the network interface to get the effective firewalls.
     * </pre>
     *
     * <code>string network_interface = 365387880 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @param value The bytes for networkInterface to set.
     * @return This builder for chaining.
     */
    public Builder setNetworkInterfaceBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value); // proto3 strings must be valid UTF-8
      networkInterface_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
private java.lang.Object project_ = "";
/**
*
*
* <pre>
* Project ID for this request.
* </pre>
*
* <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The project.
*/
public java.lang.String getProject() {
java.lang.Object ref = project_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
project_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Project ID for this request.
* </pre>
*
* <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for project.
*/
public com.google.protobuf.ByteString getProjectBytes() {
java.lang.Object ref = project_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
project_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Project ID for this request.
* </pre>
*
* <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The project to set.
* @return This builder for chaining.
*/
public Builder setProject(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
project_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Project ID for this request.
* </pre>
*
* <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return This builder for chaining.
*/
public Builder clearProject() {
project_ = getDefaultInstance().getProject();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
*
*
* <pre>
* Project ID for this request.
* </pre>
*
* <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The bytes for project to set.
* @return This builder for chaining.
*/
public Builder setProjectBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
project_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
private java.lang.Object zone_ = "";
/**
*
*
* <pre>
* The name of the zone for this request.
* </pre>
*
* <code>string zone = 3744684 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The zone.
*/
public java.lang.String getZone() {
java.lang.Object ref = zone_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
zone_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* The name of the zone for this request.
* </pre>
*
* <code>string zone = 3744684 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for zone.
*/
public com.google.protobuf.ByteString getZoneBytes() {
java.lang.Object ref = zone_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
zone_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* The name of the zone for this request.
* </pre>
*
* <code>string zone = 3744684 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The zone to set.
* @return This builder for chaining.
*/
public Builder setZone(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
zone_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
*
*
* <pre>
* The name of the zone for this request.
* </pre>
*
* <code>string zone = 3744684 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return This builder for chaining.
*/
public Builder clearZone() {
zone_ = getDefaultInstance().getZone();
bitField0_ = (bitField0_ & ~0x00000008);
onChanged();
return this;
}
/**
*
*
* <pre>
* The name of the zone for this request.
* </pre>
*
* <code>string zone = 3744684 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The bytes for zone to set.
* @return This builder for chaining.
*/
public Builder setZoneBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
zone_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.compute.v1.GetEffectiveFirewallsInstanceRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.compute.v1.GetEffectiveFirewallsInstanceRequest)
  // Singleton default instance shared by all callers; created eagerly in the
  // static initializer so getDefaultInstance() is allocation-free.
  private static final com.google.cloud.compute.v1.GetEffectiveFirewallsInstanceRequest
      DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.compute.v1.GetEffectiveFirewallsInstanceRequest();
  }
  public static com.google.cloud.compute.v1.GetEffectiveFirewallsInstanceRequest
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Stateless parser used by all parseFrom(...) entry points. It drives a
  // fresh Builder and, on failure, attaches the partially-parsed message to
  // the thrown InvalidProtocolBufferException so callers can inspect it.
  private static final com.google.protobuf.Parser<GetEffectiveFirewallsInstanceRequest> PARSER =
      new com.google.protobuf.AbstractParser<GetEffectiveFirewallsInstanceRequest>() {
        @java.lang.Override
        public GetEffectiveFirewallsInstanceRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O failures so the parser API surfaces a single
            // checked exception type.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<GetEffectiveFirewallsInstanceRequest> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<GetEffectiveFirewallsInstanceRequest> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.compute.v1.GetEffectiveFirewallsInstanceRequest
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
// ==== NOTE(review): the lines below originally contained a file-concatenation
// artifact ("googleapis/google-cloud-java | 37,625 | java-tpu/proto-google-cloud-tpu-v1/
// src/main/java/com/google/cloud/tpu/v1/ListTensorFlowVersionsRequest.java"); the next
// section is that separate generated file. ====
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/tpu/v1/cloud_tpu.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.tpu.v1;
/**
*
*
* <pre>
* Request for
* [ListTensorFlowVersions][google.cloud.tpu.v1.Tpu.ListTensorFlowVersions].
* </pre>
*
* Protobuf type {@code google.cloud.tpu.v1.ListTensorFlowVersionsRequest}
*/
public final class ListTensorFlowVersionsRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.tpu.v1.ListTensorFlowVersionsRequest)
ListTensorFlowVersionsRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListTensorFlowVersionsRequest.newBuilder() to construct.
  // Builder-driven constructor used by buildPartial(); state is copied from
  // the supplied builder by the superclass.
  private ListTensorFlowVersionsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // Default constructor: initializes every string field to the empty string
  // (proto3 default); page_size defaults to 0 implicitly.
  private ListTensorFlowVersionsRequest() {
    parent_ = "";
    pageToken_ = "";
    filter_ = "";
    orderBy_ = "";
  }
  // Reflection hook used by the protobuf runtime to create instances without
  // invoking a public constructor.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ListTensorFlowVersionsRequest();
  }
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.tpu.v1.CloudTpuProto
        .internal_static_google_cloud_tpu_v1_ListTensorFlowVersionsRequest_descriptor;
  }
  // Wires the descriptor to this class and its Builder for reflective field
  // access (getField/setField etc.).
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.tpu.v1.CloudTpuProto
        .internal_static_google_cloud_tpu_v1_ListTensorFlowVersionsRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.tpu.v1.ListTensorFlowVersionsRequest.class,
            com.google.cloud.tpu.v1.ListTensorFlowVersionsRequest.Builder.class);
  }
  // ---- parent (field 1, required resource name) ----
  // String fields are held as Object so they can lazily cache both the String
  // and ByteString forms; the unsynchronized memoizing write is a benign race.
  public static final int PARENT_FIELD_NUMBER = 1;
  @SuppressWarnings("serial")
  private volatile java.lang.Object parent_ = "";
  /**
   *
   *
   * <pre>
   * Required. The parent resource name.
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The parent.
   */
  @java.lang.Override
  public java.lang.String getParent() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Decode the cached ByteString once and memoize the String form.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      parent_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Required. The parent resource name.
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The bytes for parent.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getParentBytes() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      parent_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // ---- page_size (field 2) ----
  public static final int PAGE_SIZE_FIELD_NUMBER = 2;
  private int pageSize_ = 0;
  /**
   *
   *
   * <pre>
   * The maximum number of items to return.
   * </pre>
   *
   * <code>int32 page_size = 2;</code>
   *
   * @return The pageSize.
   */
  @java.lang.Override
  public int getPageSize() {
    return pageSize_;
  }
  // ---- page_token (field 3) ---- same lazy caching pattern as parent.
  public static final int PAGE_TOKEN_FIELD_NUMBER = 3;
  @SuppressWarnings("serial")
  private volatile java.lang.Object pageToken_ = "";
  /**
   *
   *
   * <pre>
   * The next_page_token value returned from a previous List request, if any.
   * </pre>
   *
   * <code>string page_token = 3;</code>
   *
   * @return The pageToken.
   */
  @java.lang.Override
  public java.lang.String getPageToken() {
    java.lang.Object ref = pageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      pageToken_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * The next_page_token value returned from a previous List request, if any.
   * </pre>
   *
   * <code>string page_token = 3;</code>
   *
   * @return The bytes for pageToken.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getPageTokenBytes() {
    java.lang.Object ref = pageToken_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      pageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // ---- filter (field 5; note field 4 is unused in this message) ----
  public static final int FILTER_FIELD_NUMBER = 5;
  @SuppressWarnings("serial")
  private volatile java.lang.Object filter_ = "";
  /**
   *
   *
   * <pre>
   * List filter.
   * </pre>
   *
   * <code>string filter = 5;</code>
   *
   * @return The filter.
   */
  @java.lang.Override
  public java.lang.String getFilter() {
    java.lang.Object ref = filter_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      filter_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * List filter.
   * </pre>
   *
   * <code>string filter = 5;</code>
   *
   * @return The bytes for filter.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getFilterBytes() {
    java.lang.Object ref = filter_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      filter_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // ---- order_by (field 6) ----
  public static final int ORDER_BY_FIELD_NUMBER = 6;
  @SuppressWarnings("serial")
  private volatile java.lang.Object orderBy_ = "";
  /**
   *
   *
   * <pre>
   * Sort results.
   * </pre>
   *
   * <code>string order_by = 6;</code>
   *
   * @return The orderBy.
   */
  @java.lang.Override
  public java.lang.String getOrderBy() {
    java.lang.Object ref = orderBy_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      orderBy_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Sort results.
   * </pre>
   *
   * <code>string order_by = 6;</code>
   *
   * @return The bytes for orderBy.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getOrderByBytes() {
    java.lang.Object ref = orderBy_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      orderBy_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // Memoized initialization check: -1 = unknown, 0 = not initialized,
  // 1 = initialized. This message has no required proto2 fields, so the
  // answer is always true after the first call.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes only non-default fields (proto3 semantics), in ascending
  // field-number order, followed by any unknown fields.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
    }
    if (pageSize_ != 0) {
      output.writeInt32(2, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 5, filter_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(orderBy_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 6, orderBy_);
    }
    getUnknownFields().writeTo(output);
  }
  // Computes and caches the wire size; mirrors writeTo's field selection
  // exactly (memoizedSize of -1 means "not yet computed").
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
    }
    if (pageSize_ != 0) {
      size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, filter_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(orderBy_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(6, orderBy_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Field-by-field value equality, including unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.tpu.v1.ListTensorFlowVersionsRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.tpu.v1.ListTensorFlowVersionsRequest other =
        (com.google.cloud.tpu.v1.ListTensorFlowVersionsRequest) obj;
    if (!getParent().equals(other.getParent())) return false;
    if (getPageSize() != other.getPageSize()) return false;
    if (!getPageToken().equals(other.getPageToken())) return false;
    if (!getFilter().equals(other.getFilter())) return false;
    if (!getOrderBy().equals(other.getOrderBy())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Hash is memoized (0 means "not yet computed") and mixes each field
  // number with its value, consistent with equals().
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
    hash = (53 * hash) + getPageSize();
    hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getPageToken().hashCode();
    hash = (37 * hash) + FILTER_FIELD_NUMBER;
    hash = (53 * hash) + getFilter().hashCode();
    hash = (37 * hash) + ORDER_BY_FIELD_NUMBER;
    hash = (53 * hash) + getOrderBy().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points; all delegate to the shared PARSER
  // (or the GeneratedMessageV3 stream helpers) and differ only in input type.
  public static com.google.cloud.tpu.v1.ListTensorFlowVersionsRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.tpu.v1.ListTensorFlowVersionsRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.tpu.v1.ListTensorFlowVersionsRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.tpu.v1.ListTensorFlowVersionsRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.tpu.v1.ListTensorFlowVersionsRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.tpu.v1.ListTensorFlowVersionsRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.tpu.v1.ListTensorFlowVersionsRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.tpu.v1.ListTensorFlowVersionsRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Delimited variants read a leading varint length prefix before the message.
  public static com.google.cloud.tpu.v1.ListTensorFlowVersionsRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.tpu.v1.ListTensorFlowVersionsRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.tpu.v1.ListTensorFlowVersionsRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.tpu.v1.ListTensorFlowVersionsRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Builder factories: newBuilder() starts from the default instance;
  // toBuilder() seeds a builder with this message's current field values.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(
      com.google.cloud.tpu.v1.ListTensorFlowVersionsRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Request for
* [ListTensorFlowVersions][google.cloud.tpu.v1.Tpu.ListTensorFlowVersions].
* </pre>
*
* Protobuf type {@code google.cloud.tpu.v1.ListTensorFlowVersionsRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.tpu.v1.ListTensorFlowVersionsRequest)
com.google.cloud.tpu.v1.ListTensorFlowVersionsRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.tpu.v1.CloudTpuProto
          .internal_static_google_cloud_tpu_v1_ListTensorFlowVersionsRequest_descriptor;
    }
    // Shares the same field-accessor table as the message class so reflective
    // access works identically on builder and message.
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.tpu.v1.CloudTpuProto
          .internal_static_google_cloud_tpu_v1_ListTensorFlowVersionsRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.tpu.v1.ListTensorFlowVersionsRequest.class,
              com.google.cloud.tpu.v1.ListTensorFlowVersionsRequest.Builder.class);
    }
    // Construct using com.google.cloud.tpu.v1.ListTensorFlowVersionsRequest.newBuilder()
    private Builder() {}
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    // Resets all fields to proto3 defaults and clears every presence bit.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      parent_ = "";
      pageSize_ = 0;
      pageToken_ = "";
      filter_ = "";
      orderBy_ = "";
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.tpu.v1.CloudTpuProto
          .internal_static_google_cloud_tpu_v1_ListTensorFlowVersionsRequest_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.tpu.v1.ListTensorFlowVersionsRequest getDefaultInstanceForType() {
      return com.google.cloud.tpu.v1.ListTensorFlowVersionsRequest.getDefaultInstance();
    }
    // build() = buildPartial() + initialization check (always passes here,
    // since this proto3 message has no required fields).
    @java.lang.Override
    public com.google.cloud.tpu.v1.ListTensorFlowVersionsRequest build() {
      com.google.cloud.tpu.v1.ListTensorFlowVersionsRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.tpu.v1.ListTensorFlowVersionsRequest buildPartial() {
      com.google.cloud.tpu.v1.ListTensorFlowVersionsRequest result =
          new com.google.cloud.tpu.v1.ListTensorFlowVersionsRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Copies only the fields whose presence bit is set; untouched fields keep
    // the defaults assigned by the message constructor.
    private void buildPartial0(com.google.cloud.tpu.v1.ListTensorFlowVersionsRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.parent_ = parent_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.pageSize_ = pageSize_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.pageToken_ = pageToken_;
      }
      if (((from_bitField0_ & 0x00000008) != 0)) {
        result.filter_ = filter_;
      }
      if (((from_bitField0_ & 0x00000010) != 0)) {
        result.orderBy_ = orderBy_;
      }
    }
    // The generic mutators below delegate to the superclass unchanged; they
    // are overridden only to narrow the return type to this Builder.
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    // Dynamic dispatch: use the typed merge when possible, otherwise fall
    // back to the reflective merge in the superclass.
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.tpu.v1.ListTensorFlowVersionsRequest) {
        return mergeFrom((com.google.cloud.tpu.v1.ListTensorFlowVersionsRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Typed merge: non-default fields from `other` overwrite this builder's
    // values; default (empty/zero) fields in `other` are ignored.
    public Builder mergeFrom(com.google.cloud.tpu.v1.ListTensorFlowVersionsRequest other) {
      if (other == com.google.cloud.tpu.v1.ListTensorFlowVersionsRequest.getDefaultInstance())
        return this;
      if (!other.getParent().isEmpty()) {
        parent_ = other.parent_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (other.getPageSize() != 0) {
        setPageSize(other.getPageSize());
      }
      if (!other.getPageToken().isEmpty()) {
        pageToken_ = other.pageToken_;
        bitField0_ |= 0x00000004;
        onChanged();
      }
      if (!other.getFilter().isEmpty()) {
        filter_ = other.filter_;
        bitField0_ |= 0x00000008;
        onChanged();
      }
      if (!other.getOrderBy().isEmpty()) {
        orderBy_ = other.orderBy_;
        bitField0_ |= 0x00000010;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    // Wire-format parse loop. Each case label is the full tag value
    // (field_number << 3 | wire_type); e.g. 10 = field 1, length-delimited.
    // Unrecognized tags are preserved as unknown fields.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                parent_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 16:
              {
                pageSize_ = input.readInt32();
                bitField0_ |= 0x00000002;
                break;
              } // case 16
            case 26:
              {
                pageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            case 42:
              {
                filter_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000008;
                break;
              } // case 42
            case 50:
              {
                orderBy_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000010;
                break;
              } // case 50
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        // Notify parent builders even if parsing failed part-way through.
        onChanged();
      } // finally
      return this;
    }
private int bitField0_;
private java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The parent resource name.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The parent resource name.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The parent resource name.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The parent to set.
* @return This builder for chaining.
*/
public Builder setParent(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The parent resource name.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearParent() {
parent_ = getDefaultInstance().getParent();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The parent resource name.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for parent to set.
* @return This builder for chaining.
*/
public Builder setParentBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private int pageSize_;
/**
*
*
* <pre>
* The maximum number of items to return.
* </pre>
*
* <code>int32 page_size = 2;</code>
*
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
/**
*
*
* <pre>
* The maximum number of items to return.
* </pre>
*
* <code>int32 page_size = 2;</code>
*
* @param value The pageSize to set.
* @return This builder for chaining.
*/
public Builder setPageSize(int value) {
pageSize_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* The maximum number of items to return.
* </pre>
*
* <code>int32 page_size = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearPageSize() {
bitField0_ = (bitField0_ & ~0x00000002);
pageSize_ = 0;
onChanged();
return this;
}
    // Proto field: string page_token = 3. Stored as Object because protobuf
    // keeps the value lazily as either a String or a ByteString; the accessors
    // below convert on demand and memoize the result. Presence bit: 0x00000004.
    private java.lang.Object pageToken_ = "";
    /**
     *
     *
     * <pre>
     * The next_page_token value returned from a previous List request, if any.
     * </pre>
     *
     * <code>string page_token = 3;</code>
     *
     * @return The pageToken.
     */
    public java.lang.String getPageToken() {
      java.lang.Object ref = pageToken_;
      if (!(ref instanceof java.lang.String)) {
        // Cached as ByteString: decode once and cache the String form.
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        pageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * The next_page_token value returned from a previous List request, if any.
     * </pre>
     *
     * <code>string page_token = 3;</code>
     *
     * @return The bytes for pageToken.
     */
    public com.google.protobuf.ByteString getPageTokenBytes() {
      java.lang.Object ref = pageToken_;
      if (ref instanceof String) {
        // Cached as String: encode once and cache the ByteString form.
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        pageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * The next_page_token value returned from a previous List request, if any.
     * </pre>
     *
     * <code>string page_token = 3;</code>
     *
     * @param value The pageToken to set.
     * @return This builder for chaining.
     */
    public Builder setPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      pageToken_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The next_page_token value returned from a previous List request, if any.
     * </pre>
     *
     * <code>string page_token = 3;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearPageToken() {
      // Reset to the default instance's value ("") and drop the presence bit.
      pageToken_ = getDefaultInstance().getPageToken();
      bitField0_ = (bitField0_ & ~0x00000004);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The next_page_token value returned from a previous List request, if any.
     * </pre>
     *
     * <code>string page_token = 3;</code>
     *
     * @param value The bytes for pageToken to set.
     * @return This builder for chaining.
     */
    public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      // proto3 strings must be valid UTF-8; reject invalid bytes up front.
      checkByteStringIsUtf8(value);
      pageToken_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    // Proto field: string filter = 5. Same lazy String/ByteString caching
    // scheme as page_token above. Presence bit: 0x00000008.
    private java.lang.Object filter_ = "";
    /**
     *
     *
     * <pre>
     * List filter.
     * </pre>
     *
     * <code>string filter = 5;</code>
     *
     * @return The filter.
     */
    public java.lang.String getFilter() {
      java.lang.Object ref = filter_;
      if (!(ref instanceof java.lang.String)) {
        // Cached as ByteString: decode once and cache the String form.
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        filter_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * List filter.
     * </pre>
     *
     * <code>string filter = 5;</code>
     *
     * @return The bytes for filter.
     */
    public com.google.protobuf.ByteString getFilterBytes() {
      java.lang.Object ref = filter_;
      if (ref instanceof String) {
        // Cached as String: encode once and cache the ByteString form.
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        filter_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * List filter.
     * </pre>
     *
     * <code>string filter = 5;</code>
     *
     * @param value The filter to set.
     * @return This builder for chaining.
     */
    public Builder setFilter(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      filter_ = value;
      bitField0_ |= 0x00000008;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * List filter.
     * </pre>
     *
     * <code>string filter = 5;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearFilter() {
      // Reset to the default instance's value ("") and drop the presence bit.
      filter_ = getDefaultInstance().getFilter();
      bitField0_ = (bitField0_ & ~0x00000008);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * List filter.
     * </pre>
     *
     * <code>string filter = 5;</code>
     *
     * @param value The bytes for filter to set.
     * @return This builder for chaining.
     */
    public Builder setFilterBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      // proto3 strings must be valid UTF-8; reject invalid bytes up front.
      checkByteStringIsUtf8(value);
      filter_ = value;
      bitField0_ |= 0x00000008;
      onChanged();
      return this;
    }
    // Proto field: string order_by = 6. Same lazy String/ByteString caching
    // scheme as page_token above. Presence bit: 0x00000010.
    private java.lang.Object orderBy_ = "";
    /**
     *
     *
     * <pre>
     * Sort results.
     * </pre>
     *
     * <code>string order_by = 6;</code>
     *
     * @return The orderBy.
     */
    public java.lang.String getOrderBy() {
      java.lang.Object ref = orderBy_;
      if (!(ref instanceof java.lang.String)) {
        // Cached as ByteString: decode once and cache the String form.
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        orderBy_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Sort results.
     * </pre>
     *
     * <code>string order_by = 6;</code>
     *
     * @return The bytes for orderBy.
     */
    public com.google.protobuf.ByteString getOrderByBytes() {
      java.lang.Object ref = orderBy_;
      if (ref instanceof String) {
        // Cached as String: encode once and cache the ByteString form.
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        orderBy_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Sort results.
     * </pre>
     *
     * <code>string order_by = 6;</code>
     *
     * @param value The orderBy to set.
     * @return This builder for chaining.
     */
    public Builder setOrderBy(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      orderBy_ = value;
      bitField0_ |= 0x00000010;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Sort results.
     * </pre>
     *
     * <code>string order_by = 6;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearOrderBy() {
      // Reset to the default instance's value ("") and drop the presence bit.
      orderBy_ = getDefaultInstance().getOrderBy();
      bitField0_ = (bitField0_ & ~0x00000010);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Sort results.
     * </pre>
     *
     * <code>string order_by = 6;</code>
     *
     * @param value The bytes for orderBy to set.
     * @return This builder for chaining.
     */
    public Builder setOrderByBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      // proto3 strings must be valid UTF-8; reject invalid bytes up front.
      checkByteStringIsUtf8(value);
      orderBy_ = value;
      bitField0_ |= 0x00000010;
      onChanged();
      return this;
    }
    // Generated pass-throughs: unknown-field handling is delegated entirely to
    // the GeneratedMessageV3.Builder superclass.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.tpu.v1.ListTensorFlowVersionsRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.tpu.v1.ListTensorFlowVersionsRequest)
  // Singleton default instance, created eagerly during class initialization.
  private static final com.google.cloud.tpu.v1.ListTensorFlowVersionsRequest DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.tpu.v1.ListTensorFlowVersionsRequest();
  }
  public static com.google.cloud.tpu.v1.ListTensorFlowVersionsRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Parser that delegates to Builder.mergeFrom and normalizes every failure
  // mode into InvalidProtocolBufferException, attaching the partially parsed
  // message so callers can inspect what was read before the error.
  private static final com.google.protobuf.Parser<ListTensorFlowVersionsRequest> PARSER =
      new com.google.protobuf.AbstractParser<ListTensorFlowVersionsRequest>() {
        @java.lang.Override
        public ListTensorFlowVersionsRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<ListTensorFlowVersionsRequest> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<ListTensorFlowVersionsRequest> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.tpu.v1.ListTensorFlowVersionsRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/nifi | 38,061 | nifi-commons/nifi-record/src/test/java/org/apache/nifi/serialization/record/ResultSetRecordSetTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.serialization.record;
import org.apache.nifi.serialization.SimpleRecordSchema;
import org.apache.nifi.serialization.record.type.ArrayDataType;
import org.apache.nifi.serialization.record.type.DecimalDataType;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.ArgumentMatchers;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.junit.jupiter.MockitoExtension;
import org.mockito.junit.jupiter.MockitoSettings;
import org.mockito.quality.Strictness;
import java.math.BigDecimal;
import java.sql.Array;
import java.sql.Date;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.SQLFeatureNotSupportedException;
import java.sql.Time;
import java.sql.Timestamp;
import java.sql.Types;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.fail;
import static org.mockito.Mockito.when;
@ExtendWith(MockitoExtension.class)
@MockitoSettings(strictness = Strictness.LENIENT)
public class ResultSetRecordSetTest {
    // Column-name constants shared between the COLUMNS fixture below and the
    // per-column stubbing/assertions in the tests.
    private static final String COLUMN_NAME_VARCHAR = "varchar";
    private static final String COLUMN_NAME_BIGINT = "bigint";
    private static final String COLUMN_NAME_ROWID = "rowid";
    private static final String COLUMN_NAME_BIT = "bit";
    private static final String COLUMN_NAME_BOOLEAN = "boolean";
    private static final String COLUMN_NAME_CHAR = "char";
    private static final String COLUMN_NAME_DATE = "date";
    private static final String COLUMN_NAME_TIMESTAMP = "timestamp";
    private static final String COLUMN_NAME_INTEGER = "integer";
    private static final String COLUMN_NAME_DOUBLE = "double";
    private static final String COLUMN_NAME_REAL = "real";
    private static final String COLUMN_NAME_FLOAT = "float";
    private static final String COLUMN_NAME_SMALLINT = "smallint";
    private static final String COLUMN_NAME_TINYINT = "tinyint";
    private static final String COLUMN_NAME_BIG_DECIMAL_1 = "bigDecimal1";
    private static final String COLUMN_NAME_BIG_DECIMAL_2 = "bigDecimal2";
    private static final String COLUMN_NAME_BIG_DECIMAL_3 = "bigDecimal3";
    private static final String COLUMN_NAME_BIG_DECIMAL_4 = "bigDecimal4";
    private static final String COLUMN_NAME_BIG_DECIMAL_5 = "bigDecimal5";
    // Fixed epoch-millis instant used to build java.sql.Date/Time/Timestamp values.
    private static final long TIMESTAMP_IN_MILLIS = 1631809132516L;
    // Canonical fixture: (1-based column index, column name, java.sql.Types id,
    // DataType expected from ResultSetRecordSet's schema inference).
    private static final TestColumn[] COLUMNS = new TestColumn[] {
            new TestColumn(1, COLUMN_NAME_VARCHAR, Types.VARCHAR, RecordFieldType.STRING.getDataType()),
            new TestColumn(2, COLUMN_NAME_BIGINT, Types.BIGINT, RecordFieldType.LONG.getDataType()),
            new TestColumn(3, COLUMN_NAME_ROWID, Types.ROWID, RecordFieldType.LONG.getDataType()),
            new TestColumn(4, COLUMN_NAME_BIT, Types.BIT, RecordFieldType.BOOLEAN.getDataType()),
            new TestColumn(5, COLUMN_NAME_BOOLEAN, Types.BOOLEAN, RecordFieldType.BOOLEAN.getDataType()),
            new TestColumn(6, COLUMN_NAME_CHAR, Types.CHAR, RecordFieldType.STRING.getDataType()),
            new TestColumn(7, COLUMN_NAME_DATE, Types.DATE, RecordFieldType.DATE.getDataType()),
            new TestColumn(8, COLUMN_NAME_INTEGER, Types.INTEGER, RecordFieldType.INT.getDataType()),
            new TestColumn(9, COLUMN_NAME_DOUBLE, Types.DOUBLE, RecordFieldType.DOUBLE.getDataType()),
            new TestColumn(10, COLUMN_NAME_REAL, Types.REAL, RecordFieldType.DOUBLE.getDataType()),
            new TestColumn(11, COLUMN_NAME_FLOAT, Types.FLOAT, RecordFieldType.FLOAT.getDataType()),
            new TestColumn(12, COLUMN_NAME_SMALLINT, Types.SMALLINT, RecordFieldType.SHORT.getDataType()),
            new TestColumn(13, COLUMN_NAME_TINYINT, Types.TINYINT, RecordFieldType.BYTE.getDataType()),
            new TestColumn(14, COLUMN_NAME_BIG_DECIMAL_1, Types.DECIMAL, RecordFieldType.DECIMAL.getDecimalDataType(7, 3)),
            new TestColumn(15, COLUMN_NAME_BIG_DECIMAL_2, Types.NUMERIC, RecordFieldType.DECIMAL.getDecimalDataType(4, 0)),
            new TestColumn(16, COLUMN_NAME_BIG_DECIMAL_3, Types.JAVA_OBJECT, RecordFieldType.DECIMAL.getDecimalDataType(501, 1)),
            new TestColumn(17, COLUMN_NAME_BIG_DECIMAL_4, Types.DECIMAL, RecordFieldType.DECIMAL.getDecimalDataType(10, 3)),
            new TestColumn(18, COLUMN_NAME_BIG_DECIMAL_5, Types.DECIMAL, RecordFieldType.DECIMAL.getDecimalDataType(3, 10)),
            new TestColumn(19, COLUMN_NAME_TIMESTAMP, Types.TIMESTAMP, RecordFieldType.TIMESTAMP.getDataType())
    };
    // Shared mocks re-stubbed in setUp() before each test (strictness is LENIENT,
    // so tests may leave some stubbings unused).
    @Mock
    private ResultSet resultSet;
    @Mock
    private ResultSetMetaData resultSetMetaData;
    /**
     * Wires the shared {@code ResultSet}/{@code ResultSetMetaData} mocks to describe
     * the {@code COLUMNS} fixture before every test.
     */
    @BeforeEach
    public void setUp() throws SQLException {
        setUpMocks(COLUMNS, resultSetMetaData, resultSet);
    }
    /** Schema inference with a caller-provided schema must keep every column's data type. */
    @Test
    public void testCreateSchema() throws SQLException {
        // given
        final RecordSchema recordSchema = givenRecordSchema(COLUMNS);
        final RecordSchema expectedSchema = givenRecordSchema(COLUMNS);
        // when
        final ResultSetRecordSet testSubject = new ResultSetRecordSet(resultSet, recordSchema);
        final RecordSchema actualSchema = testSubject.getSchema();
        // then
        thenAllColumnDataTypesAreCorrect(COLUMNS, expectedSchema, actualSchema);
    }
    /** Non-default precision/scale arguments must not change the inferred column types. */
    @Test
    public void testCreateSchemaWhenScaleIsNonDefault() throws SQLException {
        // given
        final RecordSchema recordSchema = givenRecordSchema(COLUMNS);
        final RecordSchema expectedSchema = givenRecordSchema(COLUMNS);
        // when
        final ResultSetRecordSet testSubject = new ResultSetRecordSet(resultSet, recordSchema, 10, 2);
        final RecordSchema actualSchema = testSubject.getSchema();
        // then
        thenAllColumnDataTypesAreCorrect(COLUMNS, expectedSchema, actualSchema);
    }
    /** Without an input schema, the schema must be derived purely from the metadata mocks. */
    @Test
    public void testCreateSchemaWhenNoRecordSchema() throws SQLException {
        // given
        final RecordSchema expectedSchema = givenRecordSchema(COLUMNS);
        // when
        final ResultSetRecordSet testSubject = new ResultSetRecordSet(resultSet, null);
        final RecordSchema actualSchema = testSubject.getSchema();
        // then
        thenAllColumnDataTypesAreCorrect(COLUMNS, expectedSchema, actualSchema);
    }
    /** A Types.OTHER column must fall back to the type declared in the provided schema. */
    @Test
    public void testCreateSchemaWhenOtherType() throws SQLException {
        // given
        final List<RecordField> fields = new ArrayList<>();
        fields.add(new RecordField("column", RecordFieldType.DECIMAL.getDecimalDataType(30, 10)));
        final RecordSchema recordSchema = new SimpleRecordSchema(fields);
        final ResultSet resultSet = givenResultSetForOther(fields);
        // when
        final ResultSetRecordSet testSubject = new ResultSetRecordSet(resultSet, recordSchema);
        final RecordSchema resultSchema = testSubject.getSchema();
        // then
        assertEquals(RecordFieldType.DECIMAL.getDecimalDataType(30, 10), resultSchema.getField(0).getDataType());
    }
    /** With useLogicalTypes=true, OTHER columns keep the logical types from the input schema. */
    @Test
    public void testCreateSchemaWhenOtherTypeUsingLogicalTypes() throws SQLException {
        // given
        final List<RecordField> fields = givenFieldsThatRequireLogicalTypes();
        final RecordSchema recordSchema = new SimpleRecordSchema(fields);
        final ResultSet resultSet = givenResultSetForOther(fields);
        // when
        final ResultSetRecordSet testSubject = new ResultSetRecordSet(resultSet, recordSchema, 10, 0, true);
        final RecordSchema resultSchema = testSubject.getSchema();
        // then
        thenAllDataTypesMatchInputFieldType(fields, resultSchema);
    }
    /** With useLogicalTypes=false, OTHER columns degrade to plain STRING types. */
    @Test
    public void testCreateSchemaWhenOtherTypeAndNoLogicalTypes() throws SQLException {
        // given
        final List<RecordField> fields = givenFieldsThatRequireLogicalTypes();
        final RecordSchema recordSchema = new SimpleRecordSchema(fields);
        final ResultSet resultSet = givenResultSetForOther(fields);
        // when
        final ResultSetRecordSet testSubject = new ResultSetRecordSet(resultSet, recordSchema, 10, 0, false);
        final RecordSchema resultSchema = testSubject.getSchema();
        // then
        thenAllDataTypesAreString(resultSchema);
    }
    /** No input schema + logical types: OTHER columns become CHOICE types. */
    @Test
    public void testCreateSchemaWhenOtherTypeUsingLogicalTypesNoSchema() throws SQLException {
        // given
        final List<RecordField> fields = givenFieldsThatRequireLogicalTypes();
        final ResultSet resultSet = givenResultSetForOther(fields);
        // when
        final ResultSetRecordSet testSubject = new ResultSetRecordSet(resultSet, null, 10, 0, true);
        final RecordSchema resultSchema = testSubject.getSchema();
        // then
        thenAllDataTypesAreChoice(fields, resultSchema);
    }
    /** No input schema + no logical types: OTHER columns degrade to STRING. */
    @Test
    public void testCreateSchemaWhenOtherTypeAndNoLogicalTypesNoSchema() throws SQLException {
        // given
        final List<RecordField> fields = givenFieldsThatRequireLogicalTypes();
        final ResultSet resultSet = givenResultSetForOther(fields);
        // when
        final ResultSetRecordSet testSubject = new ResultSetRecordSet(resultSet, null, 10, 0, false);
        final RecordSchema resultSchema = testSubject.getSchema();
        // then
        thenAllDataTypesAreString(resultSchema);
    }
    /** An OTHER column whose value is itself a Record must be inferred as a RECORD type. */
    @Test
    public void testCreateSchemaWhenOtherTypeUsingLogicalTypesWithRecord() throws SQLException {
        // given
        final Record inputRecord = givenInputRecord(); // The field's type is going to be RECORD (there is a record within a record)
        final List<RecordField> fields = givenFieldsThatAreOfTypeRecord(Arrays.asList(inputRecord));
        final ResultSet resultSet = givenResultSetForOther(fields);
        when(resultSet.getObject(1)).thenReturn(inputRecord);
        // when
        final ResultSetRecordSet testSubject = new ResultSetRecordSet(resultSet, null, 10, 0, true);
        final RecordSchema resultSchema = testSubject.getSchema();
        // then
        thenAllDataTypesMatchInputFieldType(fields, resultSchema);
    }
    /** OTHER column, no schema, default flags: inference falls back to a CHOICE type. */
    @Test
    public void testCreateSchemaWhenOtherTypeWithoutSchema() throws SQLException {
        // given
        final List<RecordField> fields = new ArrayList<>();
        fields.add(new RecordField("column", RecordFieldType.STRING.getDataType()));
        final ResultSet resultSet = givenResultSetForOther(fields);
        // when
        final ResultSetRecordSet testSubject = new ResultSetRecordSet(resultSet, null);
        final RecordSchema resultSchema = testSubject.getSchema();
        // then
        assertEquals(RecordFieldType.CHOICE, resultSchema.getField(0).getDataType().getFieldType());
    }
    /**
     * End-to-end record creation: stubs one value per fixture column on the mocked
     * ResultSet, builds a single Record, and asserts each value round-trips through
     * the typed accessors. Note the timestamp column is read via getTimestamp(),
     * not getObject(), matching the stubbing below.
     */
    @Test
    public void testCreateRecord() throws SQLException {
        final RecordSchema recordSchema = givenRecordSchema(COLUMNS);
        LocalDate testDate = LocalDate.of(2021, 1, 26);
        LocalDateTime testDateTime = LocalDateTime.of(2021, 9, 10, 11, 11, 11);
        // One representative value per column of the fixture.
        final String varcharValue = "varchar";
        final Long bigintValue = 1234567890123456789L;
        final Long rowidValue = 11111111L;
        final Boolean bitValue = Boolean.FALSE;
        final Boolean booleanValue = Boolean.TRUE;
        final Character charValue = 'c';
        final Timestamp timestampValue = Timestamp.valueOf(testDateTime);
        final Integer integerValue = 1234567890;
        final Double doubleValue = 0.12;
        final Double realValue = 3.45;
        final Float floatValue = 6.78F;
        final Short smallintValue = 12345;
        final Byte tinyintValue = 123;
        final BigDecimal bigDecimal1Value = new BigDecimal("1234.567");
        final BigDecimal bigDecimal2Value = new BigDecimal("1234");
        final BigDecimal bigDecimal3Value = new BigDecimal("1234567890.1");
        final BigDecimal bigDecimal4Value = new BigDecimal("1234567.089");
        final BigDecimal bigDecimal5Value = new BigDecimal("0.1234567");
        when(resultSet.getObject(COLUMN_NAME_VARCHAR)).thenReturn(varcharValue);
        when(resultSet.getObject(COLUMN_NAME_BIGINT)).thenReturn(bigintValue);
        when(resultSet.getObject(COLUMN_NAME_ROWID)).thenReturn(rowidValue);
        when(resultSet.getObject(COLUMN_NAME_BIT)).thenReturn(bitValue);
        when(resultSet.getObject(COLUMN_NAME_BOOLEAN)).thenReturn(booleanValue);
        when(resultSet.getObject(COLUMN_NAME_CHAR)).thenReturn(charValue);
        when(resultSet.getObject(COLUMN_NAME_DATE)).thenReturn(testDate);
        when(resultSet.getTimestamp(COLUMN_NAME_TIMESTAMP)).thenReturn(timestampValue);
        when(resultSet.getObject(COLUMN_NAME_INTEGER)).thenReturn(integerValue);
        when(resultSet.getObject(COLUMN_NAME_DOUBLE)).thenReturn(doubleValue);
        when(resultSet.getObject(COLUMN_NAME_REAL)).thenReturn(realValue);
        when(resultSet.getObject(COLUMN_NAME_FLOAT)).thenReturn(floatValue);
        when(resultSet.getObject(COLUMN_NAME_SMALLINT)).thenReturn(smallintValue);
        when(resultSet.getObject(COLUMN_NAME_TINYINT)).thenReturn(tinyintValue);
        when(resultSet.getObject(COLUMN_NAME_BIG_DECIMAL_1)).thenReturn(bigDecimal1Value);
        when(resultSet.getObject(COLUMN_NAME_BIG_DECIMAL_2)).thenReturn(bigDecimal2Value);
        when(resultSet.getObject(COLUMN_NAME_BIG_DECIMAL_3)).thenReturn(bigDecimal3Value);
        when(resultSet.getObject(COLUMN_NAME_BIG_DECIMAL_4)).thenReturn(bigDecimal4Value);
        when(resultSet.getObject(COLUMN_NAME_BIG_DECIMAL_5)).thenReturn(bigDecimal5Value);
        ResultSetRecordSet testSubject = new ResultSetRecordSet(resultSet, recordSchema);
        Record record = testSubject.createRecord(resultSet);
        assertEquals(varcharValue, record.getAsString(COLUMN_NAME_VARCHAR));
        assertEquals(bigintValue, record.getAsLong(COLUMN_NAME_BIGINT));
        assertEquals(rowidValue, record.getAsLong(COLUMN_NAME_ROWID));
        assertEquals(bitValue, record.getAsBoolean(COLUMN_NAME_BIT));
        assertEquals(booleanValue, record.getAsBoolean(COLUMN_NAME_BOOLEAN));
        assertEquals(charValue, record.getValue(COLUMN_NAME_CHAR));
        assertEquals(testDate, record.getAsLocalDate(COLUMN_NAME_DATE, null));
        final Object timestampObject = record.getValue(COLUMN_NAME_TIMESTAMP);
        assertEquals(timestampValue, timestampObject);
        assertEquals(integerValue, record.getAsInt(COLUMN_NAME_INTEGER));
        assertEquals(doubleValue, record.getAsDouble(COLUMN_NAME_DOUBLE));
        assertEquals(realValue, record.getAsDouble(COLUMN_NAME_REAL));
        assertEquals(floatValue, record.getAsFloat(COLUMN_NAME_FLOAT));
        // SHORT/BYTE columns are asserted via narrowing conversions from getAsInt.
        assertEquals(smallintValue.shortValue(), record.getAsInt(COLUMN_NAME_SMALLINT).shortValue());
        assertEquals(tinyintValue.byteValue(), record.getAsInt(COLUMN_NAME_TINYINT).byteValue());
        assertEquals(bigDecimal1Value, record.getValue(COLUMN_NAME_BIG_DECIMAL_1));
        assertEquals(bigDecimal2Value, record.getValue(COLUMN_NAME_BIG_DECIMAL_2));
        assertEquals(bigDecimal3Value, record.getValue(COLUMN_NAME_BIG_DECIMAL_3));
        assertEquals(bigDecimal4Value, record.getValue(COLUMN_NAME_BIG_DECIMAL_4));
        assertEquals(bigDecimal5Value, record.getValue(COLUMN_NAME_BIG_DECIMAL_5));
    }
    /** getArray failing with a plain SQLException must propagate out of the constructor. */
    @Test
    public void testCreateSchemaArrayThrowsException() throws SQLException {
        // given
        final List<RecordField> fields = new ArrayList<>();
        fields.add(new RecordField("column", RecordFieldType.DECIMAL.getDecimalDataType(30, 10)));
        final RecordSchema recordSchema = new SimpleRecordSchema(fields);
        final ResultSet resultSet = givenResultSetForArrayThrowsException(true);
        // when
        assertThrows(SQLException.class, () -> new ResultSetRecordSet(resultSet, recordSchema));
    }
    /**
     * A transient getMetaData failure (first call throws, second succeeds) must still
     * lead to schema creation that honors the useLogicalTypes=false flag.
     */
    @Test
    public void testCreateSchemaThrowsExceptionSchemaCreationStillCalledConsideringLogicalTypeFlag() throws SQLException {
        // given
        final List<RecordField> fields = new ArrayList<>();
        fields.add(new RecordField("column", RecordFieldType.DECIMAL.getDecimalDataType(30, 10)));
        final RecordSchema recordSchema = new SimpleRecordSchema(fields);
        final ResultSet resultSet = Mockito.mock(ResultSet.class);
        final ResultSetMetaData resultSetMetaData = Mockito.mock(ResultSetMetaData.class);
        when(resultSet.getMetaData()).thenThrow(new SQLException("test exception")).thenReturn(resultSetMetaData);
        when(resultSetMetaData.getColumnCount()).thenReturn(1);
        when(resultSetMetaData.getColumnLabel(1)).thenReturn("column");
        when(resultSetMetaData.getColumnType(1)).thenReturn(Types.DECIMAL);
        // when
        ResultSetRecordSet testSubject = new ResultSetRecordSet(resultSet, recordSchema, 10, 0, false);
        final RecordSchema resultSchema = testSubject.getSchema();
        // then
        thenAllDataTypesAreString(resultSchema);
    }
    /** SQLFeatureNotSupportedException from getArray must fall back to ARRAY&lt;STRING&gt; instead of failing. */
    @Test
    public void testCreateSchemaArrayThrowsNotSupportedException() throws SQLException {
        // given
        final List<RecordField> fields = new ArrayList<>();
        fields.add(new RecordField("column", RecordFieldType.DECIMAL.getDecimalDataType(30, 10)));
        final RecordSchema recordSchema = new SimpleRecordSchema(fields);
        final ResultSet resultSet = givenResultSetForArrayThrowsException(false);
        // when
        final ResultSetRecordSet testSubject = new ResultSetRecordSet(resultSet, recordSchema);
        final RecordSchema resultSchema = testSubject.getSchema();
        // then
        assertEquals(RecordFieldType.ARRAY.getArrayDataType(RecordFieldType.STRING.getDataType()), resultSchema.getField(0).getDataType());
    }
    // The six tests below are thin wrappers that exercise the two parameterized
    // helpers with every combination of useLogicalTypes / provideInputSchema.
    @Test
    public void testArrayTypeWithLogicalTypes() throws SQLException {
        testArrayType(true);
    }
    @Test
    public void testArrayTypeNoLogicalTypes() throws SQLException {
        testArrayType(false);
    }
    @Test
    public void testCreateSchemaWithLogicalTypes() throws SQLException {
        testCreateSchemaLogicalTypes(true, true);
    }
    @Test
    public void testCreateSchemaNoLogicalTypes() throws SQLException {
        testCreateSchemaLogicalTypes(false, true);
    }
    @Test
    public void testCreateSchemaWithLogicalTypesNoInputSchema() throws SQLException {
        testCreateSchemaLogicalTypes(true, false);
    }
    @Test
    public void testCreateSchemaNoLogicalTypesNoInputSchema() throws SQLException {
        testCreateSchemaLogicalTypes(false, false);
    }
    /**
     * Shared body for the array-type tests: builds array-valued columns whose element
     * types need logical-type handling, then verifies the inferred element types match
     * the expectation map for the given useLogicalTypes flag.
     *
     * @param useLogicalTypes whether the record set should emit logical types (vs STRING)
     */
    private void testArrayType(boolean useLogicalTypes) throws SQLException {
        // GIVEN
        List<ArrayTestData> testData = givenArrayTypesThatRequireLogicalTypes();
        Map<String, DataType> expectedTypes = givenExpectedTypesForArrayTypesThatRequireLogicalTypes(useLogicalTypes);
        // WHEN
        ResultSet resultSet = Mockito.mock(ResultSet.class);
        ResultSetMetaData resultSetMetaData = Mockito.mock(ResultSetMetaData.class);
        when(resultSet.getMetaData()).thenReturn(resultSetMetaData);
        when(resultSetMetaData.getColumnCount()).thenReturn(testData.size());
        List<RecordField> fields = whenSchemaFieldsAreSetupForArrayType(testData, resultSet, resultSetMetaData);
        RecordSchema recordSchema = new SimpleRecordSchema(fields);
        ResultSetRecordSet testSubject = new ResultSetRecordSet(resultSet, recordSchema, 10, 0, useLogicalTypes);
        RecordSchema actualSchema = testSubject.getSchema();
        // THEN
        thenActualArrayElementTypesMatchExpected(expectedTypes, actualSchema);
    }
    /**
     * Shared body for the logical-types schema tests: a fixture of temporal and
     * decimal columns is inferred either into logical types or into STRING types,
     * with or without a caller-provided input schema.
     *
     * @param useLogicalTypes    whether logical types (DATE/TIME/TIMESTAMP/DECIMAL) are expected
     * @param provideInputSchema whether a reader schema is passed to the record set
     */
    private void testCreateSchemaLogicalTypes(boolean useLogicalTypes, boolean provideInputSchema) throws SQLException {
        // GIVEN
        TestColumn[] columns = new TestColumn[]{
                new TestColumn(1, COLUMN_NAME_DATE, Types.DATE, RecordFieldType.DATE.getDataType()),
                new TestColumn(2, "time", Types.TIME, RecordFieldType.TIME.getDataType()),
                new TestColumn(3, "time_with_timezone", Types.TIME_WITH_TIMEZONE, RecordFieldType.TIME.getDataType()),
                new TestColumn(4, "timestamp", Types.TIMESTAMP, RecordFieldType.TIMESTAMP.getDataType()),
                new TestColumn(5, "timestamp_with_timezone", Types.TIMESTAMP_WITH_TIMEZONE, RecordFieldType.TIMESTAMP.getDataType()),
                new TestColumn(6, COLUMN_NAME_BIG_DECIMAL_1, Types.DECIMAL, RecordFieldType.DECIMAL.getDecimalDataType(7, 3)),
                new TestColumn(7, COLUMN_NAME_BIG_DECIMAL_2, Types.NUMERIC, RecordFieldType.DECIMAL.getDecimalDataType(4, 0)),
                new TestColumn(8, COLUMN_NAME_BIG_DECIMAL_3, Types.JAVA_OBJECT, RecordFieldType.DECIMAL.getDecimalDataType(501, 1)),
                new TestColumn(9, COLUMN_NAME_BIG_DECIMAL_4, Types.DECIMAL, RecordFieldType.DECIMAL.getDecimalDataType(10, 3)),
                new TestColumn(10, COLUMN_NAME_BIG_DECIMAL_5, Types.DECIMAL, RecordFieldType.DECIMAL.getDecimalDataType(3, 10)),
        };
        final RecordSchema recordSchema = provideInputSchema ? givenRecordSchema(columns) : null;
        ResultSetMetaData resultSetMetaData = Mockito.mock(ResultSetMetaData.class);
        ResultSet resultSet = Mockito.mock(ResultSet.class);
        RecordSchema expectedSchema = useLogicalTypes ? givenRecordSchema(columns) : givenRecordSchemaWithOnlyStringType(columns);
        // WHEN
        setUpMocks(columns, resultSetMetaData, resultSet);
        ResultSetRecordSet testSubject = new ResultSetRecordSet(resultSet, recordSchema, 10, 0, useLogicalTypes);
        RecordSchema actualSchema = testSubject.getSchema();
        // THEN
        thenAllColumnDataTypesAreCorrect(columns, expectedSchema, actualSchema);
    }
private void setUpMocks(TestColumn[] columns, ResultSetMetaData resultSetMetaData, ResultSet resultSet) throws SQLException {
when(resultSet.getMetaData()).thenReturn(resultSetMetaData);
when(resultSetMetaData.getColumnCount()).thenReturn(columns.length);
int indexOfBigDecimal = -1;
int index = 0;
for (final TestColumn column : columns) {
when(resultSetMetaData.getColumnLabel(column.getIndex())).thenReturn(column.getColumnName());
when(resultSetMetaData.getColumnName(column.getIndex())).thenReturn(column.getColumnName());
when(resultSetMetaData.getColumnType(column.getIndex())).thenReturn(column.getSqlType());
if (column.getRecordFieldType() instanceof DecimalDataType) {
DecimalDataType ddt = (DecimalDataType) column.getRecordFieldType();
when(resultSetMetaData.getPrecision(column.getIndex())).thenReturn(ddt.getPrecision());
when(resultSetMetaData.getScale(column.getIndex())).thenReturn(ddt.getScale());
}
if (column.getSqlType() == Types.JAVA_OBJECT) {
indexOfBigDecimal = index + 1;
}
++index;
}
// Big decimal values are necessary in order to determine precision and scale
when(resultSet.getBigDecimal(indexOfBigDecimal)).thenReturn(new BigDecimal(String.join("", Collections.nCopies(500, "1")) + ".1"));
// This will be handled by a dedicated branch for Java Objects, needs some further details
when(resultSetMetaData.getColumnClassName(indexOfBigDecimal)).thenReturn(BigDecimal.class.getName());
}
private List<RecordField> givenFieldsThatRequireLogicalTypes() {
final List<RecordField> fields = new ArrayList<>();
fields.add(new RecordField("decimal", RecordFieldType.DECIMAL.getDecimalDataType(30, 10)));
fields.add(new RecordField("date", RecordFieldType.DATE.getDataType()));
fields.add(new RecordField("time", RecordFieldType.TIME.getDataType()));
fields.add(new RecordField("timestamp", RecordFieldType.TIMESTAMP.getDataType()));
return fields;
}
private RecordSchema givenRecordSchema(TestColumn[] columns) {
final List<RecordField> fields = new ArrayList<>(columns.length);
for (TestColumn column : columns) {
fields.add(new RecordField(column.getColumnName(), column.getRecordFieldType()));
}
return new SimpleRecordSchema(fields);
}
private RecordSchema givenRecordSchemaWithOnlyStringType(TestColumn[] columns) {
final List<RecordField> fields = new ArrayList<>(columns.length);
for (TestColumn column : columns) {
fields.add(new RecordField(column.getColumnName(), RecordFieldType.STRING.getDataType()));
}
return new SimpleRecordSchema(fields);
}
    /**
     * Builds one array-valued column per element type whose inference needs
     * logical-type handling: BigDecimal, Date, Time and Timestamp. All temporal
     * values share the fixed TIMESTAMP_IN_MILLIS instant.
     */
    private List<ArrayTestData> givenArrayTypesThatRequireLogicalTypes() {
        List<ArrayTestData> testData = new ArrayList<>();
        testData.add(new ArrayTestData("arrayBigDecimal",
                new ResultBigDecimal[]{new ResultBigDecimal(), new ResultBigDecimal()}));
        testData.add(new ArrayTestData("arrayDate",
                new Date[]{new Date(TIMESTAMP_IN_MILLIS), new Date(TIMESTAMP_IN_MILLIS)}));
        testData.add(new ArrayTestData("arrayTime",
                new Time[]{new Time(TIMESTAMP_IN_MILLIS), new Time(TIMESTAMP_IN_MILLIS)}));
        testData.add(new ArrayTestData("arrayTimestamp",
                new Timestamp[]{new Timestamp(TIMESTAMP_IN_MILLIS), new Timestamp(TIMESTAMP_IN_MILLIS)}));
        return testData;
    }
private Map<String, DataType> givenExpectedTypesForArrayTypesThatRequireLogicalTypes(final boolean useLogicalTypes) {
Map<String, DataType> expectedTypes = new HashMap<>();
if (useLogicalTypes) {
expectedTypes.put("arrayBigDecimal", RecordFieldType.DECIMAL.getDecimalDataType(ResultBigDecimal.PRECISION, ResultBigDecimal.SCALE));
expectedTypes.put("arrayDate", RecordFieldType.DATE.getDataType());
expectedTypes.put("arrayTime", RecordFieldType.TIME.getDataType());
expectedTypes.put("arrayTimestamp", RecordFieldType.TIMESTAMP.getDataType());
} else {
expectedTypes.put("arrayBigDecimal", RecordFieldType.STRING.getDataType());
expectedTypes.put("arrayDate", RecordFieldType.STRING.getDataType());
expectedTypes.put("arrayTime", RecordFieldType.STRING.getDataType());
expectedTypes.put("arrayTimestamp", RecordFieldType.STRING.getDataType());
}
return expectedTypes;
}
    /**
     * Mocks a single-column ARRAY result set whose getArray call fails.
     *
     * @param featureSupported true to throw a plain SQLException (driver supports
     *                         arrays but the call failed); false to throw
     *                         SQLFeatureNotSupportedException (driver lacks support)
     */
    private ResultSet givenResultSetForArrayThrowsException(boolean featureSupported) throws SQLException {
        final ResultSet resultSet = Mockito.mock(ResultSet.class);
        final ResultSetMetaData resultSetMetaData = Mockito.mock(ResultSetMetaData.class);
        when(resultSet.getMetaData()).thenReturn(resultSetMetaData);
        when(resultSet.getArray(ArgumentMatchers.anyInt())).thenThrow(featureSupported ? new SQLException("test exception") : new SQLFeatureNotSupportedException("not supported"));
        when(resultSetMetaData.getColumnCount()).thenReturn(1);
        when(resultSetMetaData.getColumnLabel(1)).thenReturn("column");
        when(resultSetMetaData.getColumnType(1)).thenReturn(Types.ARRAY);
        return resultSet;
    }
/**
 * Mocks a result set in which every given field appears as a column of SQL
 * type OTHER, in list order, with matching label and name.
 */
private ResultSet givenResultSetForOther(List<RecordField> fields) throws SQLException {
    final ResultSet resultSet = Mockito.mock(ResultSet.class);
    final ResultSetMetaData metaData = Mockito.mock(ResultSetMetaData.class);
    when(resultSet.getMetaData()).thenReturn(metaData);
    when(metaData.getColumnCount()).thenReturn(fields.size());
    int columnIndex = 0; // JDBC columns are 1-based
    for (RecordField field : fields) {
        ++columnIndex;
        when(metaData.getColumnLabel(columnIndex)).thenReturn(field.getFieldName());
        when(metaData.getColumnName(columnIndex)).thenReturn(field.getFieldName());
        when(metaData.getColumnType(columnIndex)).thenReturn(Types.OTHER);
    }
    return resultSet;
}
/**
 * Builds a simple two-field input record ({id: 1, name: "John"}) used as the
 * nested-record fixture in the schema tests.
 */
private Record givenInputRecord() {
    final List<RecordField> fields = new ArrayList<>(2);
    fields.add(new RecordField("id", RecordFieldType.INT.getDataType()));
    fields.add(new RecordField("name", RecordFieldType.STRING.getDataType()));
    final Map<String, Object> values = new HashMap<>(2);
    values.put("id", 1);
    values.put("name", "John");
    return new MapRecord(new SimpleRecordSchema(fields), values);
}
/**
 * Wraps each concrete record's schema into a RECORD-typed field named
 * "record1", "record2", ... in list order.
 */
private List<RecordField> givenFieldsThatAreOfTypeRecord(List<Record> concreteRecords) {
    final List<RecordField> fields = new ArrayList<>(concreteRecords.size());
    for (int index = 0; index < concreteRecords.size(); index++) {
        final Record record = concreteRecords.get(index);
        fields.add(new RecordField("record" + (index + 1), RecordFieldType.RECORD.getRecordDataType(record.getSchema())));
    }
    return fields;
}
/**
 * Stubs one ARRAY column per test datum on the mocked result set: each column
 * returns a mocked java.sql.Array whose payload is the datum's test array.
 * Returns the matching ARRAY-typed record fields, in column order.
 */
private List<RecordField> whenSchemaFieldsAreSetupForArrayType(final List<ArrayTestData> testData,
                                                               final ResultSet resultSet,
                                                               final ResultSetMetaData resultSetMetaData)
        throws SQLException {
    final List<RecordField> fields = new ArrayList<>(testData.size());
    int columnIndex = 0; // JDBC columns are 1-based
    for (ArrayTestData testDatum : testData) {
        ++columnIndex;
        final ResultSqlArray arrayMock = Mockito.mock(ResultSqlArray.class);
        when(arrayMock.getArray()).thenReturn(testDatum.getTestArray());
        when(resultSet.getArray(columnIndex)).thenReturn(arrayMock);
        when(resultSetMetaData.getColumnLabel(columnIndex)).thenReturn(testDatum.getFieldName());
        when(resultSetMetaData.getColumnType(columnIndex)).thenReturn(Types.ARRAY);
        fields.add(new RecordField(testDatum.getFieldName(), RecordFieldType.ARRAY.getDataType()));
    }
    return fields;
}
/**
 * Asserts the result schema has exactly the input fields' data types, in order.
 */
private void thenAllDataTypesMatchInputFieldType(final List<RecordField> inputFields, final RecordSchema resultSchema) {
    assertEquals(inputFields.size(), resultSchema.getFieldCount(), "The number of input fields does not match the number of fields in the result schema.");
    int position = 0;
    for (RecordField inputField : inputFields) {
        assertEquals(inputField.getDataType(), resultSchema.getField(position).getDataType());
        position++;
    }
}
/**
 * Asserts every field of the result schema was mapped to STRING.
 */
private void thenAllDataTypesAreString(final RecordSchema resultSchema) {
    final int fieldCount = resultSchema.getFieldCount();
    for (int index = 0; index < fieldCount; index++) {
        assertEquals(RecordFieldType.STRING.getDataType(), resultSchema.getField(index).getDataType());
    }
}
/**
 * Asserts each column's converted data type matches the expected schema.
 * Special case: a DECIMAL declared with scale greater than precision (the
 * (3, 10) test column) cannot survive conversion as-is, so the expected
 * precision is widened to the scale before comparing.
 */
private void thenAllColumnDataTypesAreCorrect(TestColumn[] columns, RecordSchema expectedSchema, RecordSchema actualSchema) {
    assertNotNull(actualSchema);
    for (TestColumn column : columns) {
        final int fieldIndex = column.getIndex() - 1; // TestColumn indexes are 1-based
        final DataType actualDataType = actualSchema.getField(fieldIndex).getDataType();
        DataType expectedDataType = expectedSchema.getField(fieldIndex).getDataType();
        if (expectedDataType.equals(RecordFieldType.DECIMAL.getDecimalDataType(3, 10))) {
            final DecimalDataType decimal = (DecimalDataType) expectedDataType;
            if (decimal.getScale() > decimal.getPrecision()) {
                expectedDataType = RecordFieldType.DECIMAL.getDecimalDataType(decimal.getScale(), decimal.getScale());
            }
        }
        assertEquals(expectedDataType, actualDataType, "For column " + column.getIndex() + " the converted type is not matching");
    }
}
/**
 * Asserts every field of the result schema is an ARRAY whose element type
 * equals the expected type registered under the field's name.
 */
private void thenActualArrayElementTypesMatchExpected(Map<String, DataType> expectedTypes, RecordSchema actualSchema) {
    for (RecordField recordField : actualSchema.getFields()) {
        final DataType dataType = recordField.getDataType();
        if (!(dataType instanceof ArrayDataType)) {
            fail("RecordField " + recordField.getFieldName() + " is not instance of ArrayDataType");
        }
        final DataType expectedElementType = expectedTypes.get(recordField.getFieldName());
        assertEquals(expectedElementType, ((ArrayDataType) dataType).getElementType(),
                "Array element type for " + recordField.getFieldName()
                        + " is not of expected type " + expectedElementType.toString());
    }
}
/**
 * Asserts every field of the result schema was widened to the broadest CHOICE
 * data type (one CHOICE covering all simple record field types).
 */
private void thenAllDataTypesAreChoice(final List<RecordField> inputFields, final RecordSchema resultSchema) {
    assertEquals(inputFields.size(), resultSchema.getFieldCount(), "The number of input fields does not match the number of fields in the result schema.");
    final DataType broadestChoice = getBroadestChoiceDataType();
    for (int index = 0; index < inputFields.size(); index++) {
        assertEquals(broadestChoice, resultSchema.getField(index).getDataType());
    }
}
/**
 * Builds the CHOICE data type spanning all simple field types, in the same
 * member order the production code produces.
 */
private DataType getBroadestChoiceDataType() {
    final RecordFieldType[] memberTypes = {
            RecordFieldType.BIGINT, RecordFieldType.BOOLEAN, RecordFieldType.BYTE, RecordFieldType.CHAR, RecordFieldType.DATE,
            RecordFieldType.DECIMAL, RecordFieldType.DOUBLE, RecordFieldType.FLOAT, RecordFieldType.INT, RecordFieldType.LONG,
            RecordFieldType.SHORT, RecordFieldType.STRING, RecordFieldType.TIME, RecordFieldType.TIMESTAMP
    };
    final List<DataType> dataTypes = new ArrayList<>(memberTypes.length);
    for (RecordFieldType memberType : memberTypes) {
        dataTypes.add(memberType.getDataType());
    }
    return RecordFieldType.CHOICE.getChoiceDataType(dataTypes);
}
/**
 * Immutable description of one result-set column used to drive the
 * schema-conversion tests: its 1-based JDBC index, its label, the
 * {@code java.sql.Types} code it reports, and the record data type the
 * conversion is expected to produce for it.
 */
private static class TestColumn {
    private final int index; // Column indexing starts from 1, not 0.
    private final String columnName;
    private final int sqlType; // One of the java.sql.Types constants.
    private final DataType recordFieldType; // Expected converted record data type.

    public TestColumn(final int index, final String columnName, final int sqlType, final DataType recordFieldType) {
        this.index = index;
        this.columnName = columnName;
        this.sqlType = sqlType;
        this.recordFieldType = recordFieldType;
    }

    public int getIndex() {
        return index;
    }

    public String getColumnName() {
        return columnName;
    }

    public int getSqlType() {
        return sqlType;
    }

    public DataType getRecordFieldType() {
        return recordFieldType;
    }
}
/**
 * No-op implementation of {@link java.sql.Array} that exists only so the tests
 * have a concrete class to hand to {@code Mockito.mock(ResultSqlArray.class)};
 * the interesting behavior ({@code getArray()}) is stubbed on the mock, never
 * invoked on this class. All methods therefore return null/zero.
 */
private static class ResultSqlArray implements Array {
    @Override
    public String getBaseTypeName() throws SQLException {
        return null;
    }

    @Override
    public int getBaseType() throws SQLException {
        return 0;
    }

    // Stubbed per-test via Mockito to return the fixture array.
    @Override
    public Object getArray() throws SQLException {
        return null;
    }

    @Override
    public Object getArray(Map<String, Class<?>> map) throws SQLException {
        return null;
    }

    @Override
    public Object getArray(long index, int count) throws SQLException {
        return null;
    }

    @Override
    public Object getArray(long index, int count, Map<String, Class<?>> map) throws SQLException {
        return null;
    }

    @Override
    public ResultSet getResultSet() throws SQLException {
        return null;
    }

    @Override
    public ResultSet getResultSet(Map<String, Class<?>> map) throws SQLException {
        return null;
    }

    @Override
    public ResultSet getResultSet(long index, int count) throws SQLException {
        return null;
    }

    @Override
    public ResultSet getResultSet(long index, int count, Map<String, Class<?>> map) throws SQLException {
        return null;
    }

    // Nothing to release: this stub holds no resources.
    @Override
    public void free() throws SQLException {
    }
}
/**
 * BigDecimal fixture fixed at the value 123, exposing the precision (3) and
 * scale (0) the tests expect the converted DECIMAL type to carry.
 */
private static class ResultBigDecimal extends BigDecimal {
    // BigDecimal is Serializable; declare a UID so the subclass does not rely on a generated one.
    private static final long serialVersionUID = 1L;
    // Made final: these are test constants and must not be reassignable.
    public static final int PRECISION = 3; // "123" has three significant digits
    public static final int SCALE = 0;     // "123" has no fractional digits

    public ResultBigDecimal() {
        super("123"); // String constructor: exact representation, no double rounding
    }
}
/**
 * Pairs a field name with the raw array the mocked {@code java.sql.Array}
 * returns for that field in the array-type schema tests.
 */
private static class ArrayTestData {
    private final String fieldName;
    private final Object[] testArray;

    public ArrayTestData(String fieldName, Object[] testArray) {
        this.fieldName = fieldName;
        this.testArray = testArray;
    }

    public String getFieldName() {
        return fieldName;
    }

    public Object[] getTestArray() {
        return testArray;
    }
}
} |
googleapis/google-cloud-java | 37,765 | java-private-catalog/proto-google-cloud-private-catalog-v1beta1/src/main/java/com/google/cloud/privatecatalog/v1beta1/SearchVersionsResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/privatecatalog/v1beta1/private_catalog.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.privatecatalog.v1beta1;
/**
*
*
* <pre>
* Response message for [PrivateCatalog.SearchVersions][google.cloud.privatecatalog.v1beta1.PrivateCatalog.SearchVersions].
* </pre>
*
* Protobuf type {@code google.cloud.privatecatalog.v1beta1.SearchVersionsResponse}
*/
public final class SearchVersionsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.privatecatalog.v1beta1.SearchVersionsResponse)
SearchVersionsResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use SearchVersionsResponse.newBuilder() to construct.
private SearchVersionsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private SearchVersionsResponse() {
versions_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new SearchVersionsResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.privatecatalog.v1beta1.PrivateCatalogProto
.internal_static_google_cloud_privatecatalog_v1beta1_SearchVersionsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.privatecatalog.v1beta1.PrivateCatalogProto
.internal_static_google_cloud_privatecatalog_v1beta1_SearchVersionsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.privatecatalog.v1beta1.SearchVersionsResponse.class,
com.google.cloud.privatecatalog.v1beta1.SearchVersionsResponse.Builder.class);
}
public static final int VERSIONS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.privatecatalog.v1beta1.Version> versions_;
/**
*
*
* <pre>
* The `Version` resources computed from the resource context.
* </pre>
*
* <code>repeated .google.cloud.privatecatalog.v1beta1.Version versions = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.privatecatalog.v1beta1.Version> getVersionsList() {
return versions_;
}
/**
*
*
* <pre>
* The `Version` resources computed from the resource context.
* </pre>
*
* <code>repeated .google.cloud.privatecatalog.v1beta1.Version versions = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.privatecatalog.v1beta1.VersionOrBuilder>
getVersionsOrBuilderList() {
return versions_;
}
/**
*
*
* <pre>
* The `Version` resources computed from the resource context.
* </pre>
*
* <code>repeated .google.cloud.privatecatalog.v1beta1.Version versions = 1;</code>
*/
@java.lang.Override
public int getVersionsCount() {
return versions_.size();
}
/**
*
*
* <pre>
* The `Version` resources computed from the resource context.
* </pre>
*
* <code>repeated .google.cloud.privatecatalog.v1beta1.Version versions = 1;</code>
*/
@java.lang.Override
public com.google.cloud.privatecatalog.v1beta1.Version getVersions(int index) {
return versions_.get(index);
}
/**
*
*
* <pre>
* The `Version` resources computed from the resource context.
* </pre>
*
* <code>repeated .google.cloud.privatecatalog.v1beta1.Version versions = 1;</code>
*/
@java.lang.Override
public com.google.cloud.privatecatalog.v1beta1.VersionOrBuilder getVersionsOrBuilder(int index) {
return versions_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A pagination token returned from a previous call to SearchVersions that
* indicates from where the listing should continue.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* A pagination token returned from a previous call to SearchVersions that
* indicates from where the listing should continue.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < versions_.size(); i++) {
output.writeMessage(1, versions_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < versions_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, versions_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.privatecatalog.v1beta1.SearchVersionsResponse)) {
return super.equals(obj);
}
com.google.cloud.privatecatalog.v1beta1.SearchVersionsResponse other =
(com.google.cloud.privatecatalog.v1beta1.SearchVersionsResponse) obj;
if (!getVersionsList().equals(other.getVersionsList())) return false;
if (!getNextPageToken().equals(other.getNextPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getVersionsCount() > 0) {
hash = (37 * hash) + VERSIONS_FIELD_NUMBER;
hash = (53 * hash) + getVersionsList().hashCode();
}
hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getNextPageToken().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.privatecatalog.v1beta1.SearchVersionsResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.privatecatalog.v1beta1.SearchVersionsResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.privatecatalog.v1beta1.SearchVersionsResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.privatecatalog.v1beta1.SearchVersionsResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.privatecatalog.v1beta1.SearchVersionsResponse parseFrom(
byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.privatecatalog.v1beta1.SearchVersionsResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.privatecatalog.v1beta1.SearchVersionsResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.privatecatalog.v1beta1.SearchVersionsResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.privatecatalog.v1beta1.SearchVersionsResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.privatecatalog.v1beta1.SearchVersionsResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.privatecatalog.v1beta1.SearchVersionsResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.privatecatalog.v1beta1.SearchVersionsResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.privatecatalog.v1beta1.SearchVersionsResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Response message for [PrivateCatalog.SearchVersions][google.cloud.privatecatalog.v1beta1.PrivateCatalog.SearchVersions].
* </pre>
*
* Protobuf type {@code google.cloud.privatecatalog.v1beta1.SearchVersionsResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.privatecatalog.v1beta1.SearchVersionsResponse)
com.google.cloud.privatecatalog.v1beta1.SearchVersionsResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.privatecatalog.v1beta1.PrivateCatalogProto
.internal_static_google_cloud_privatecatalog_v1beta1_SearchVersionsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.privatecatalog.v1beta1.PrivateCatalogProto
.internal_static_google_cloud_privatecatalog_v1beta1_SearchVersionsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.privatecatalog.v1beta1.SearchVersionsResponse.class,
com.google.cloud.privatecatalog.v1beta1.SearchVersionsResponse.Builder.class);
}
// Construct using com.google.cloud.privatecatalog.v1beta1.SearchVersionsResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (versionsBuilder_ == null) {
versions_ = java.util.Collections.emptyList();
} else {
versions_ = null;
versionsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.privatecatalog.v1beta1.PrivateCatalogProto
.internal_static_google_cloud_privatecatalog_v1beta1_SearchVersionsResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.privatecatalog.v1beta1.SearchVersionsResponse
getDefaultInstanceForType() {
return com.google.cloud.privatecatalog.v1beta1.SearchVersionsResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.privatecatalog.v1beta1.SearchVersionsResponse build() {
com.google.cloud.privatecatalog.v1beta1.SearchVersionsResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.privatecatalog.v1beta1.SearchVersionsResponse buildPartial() {
com.google.cloud.privatecatalog.v1beta1.SearchVersionsResponse result =
new com.google.cloud.privatecatalog.v1beta1.SearchVersionsResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.cloud.privatecatalog.v1beta1.SearchVersionsResponse result) {
if (versionsBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
versions_ = java.util.Collections.unmodifiableList(versions_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.versions_ = versions_;
} else {
result.versions_ = versionsBuilder_.build();
}
}
private void buildPartial0(
com.google.cloud.privatecatalog.v1beta1.SearchVersionsResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.privatecatalog.v1beta1.SearchVersionsResponse) {
return mergeFrom((com.google.cloud.privatecatalog.v1beta1.SearchVersionsResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.privatecatalog.v1beta1.SearchVersionsResponse other) {
if (other
== com.google.cloud.privatecatalog.v1beta1.SearchVersionsResponse.getDefaultInstance())
return this;
if (versionsBuilder_ == null) {
if (!other.versions_.isEmpty()) {
if (versions_.isEmpty()) {
versions_ = other.versions_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureVersionsIsMutable();
versions_.addAll(other.versions_);
}
onChanged();
}
} else {
if (!other.versions_.isEmpty()) {
if (versionsBuilder_.isEmpty()) {
versionsBuilder_.dispose();
versionsBuilder_ = null;
versions_ = other.versions_;
bitField0_ = (bitField0_ & ~0x00000001);
versionsBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getVersionsFieldBuilder()
: null;
} else {
versionsBuilder_.addAllMessages(other.versions_);
}
}
}
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.cloud.privatecatalog.v1beta1.Version m =
input.readMessage(
com.google.cloud.privatecatalog.v1beta1.Version.parser(),
extensionRegistry);
if (versionsBuilder_ == null) {
ensureVersionsIsMutable();
versions_.add(m);
} else {
versionsBuilder_.addMessage(m);
}
break;
} // case 10
case 18:
{
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.cloud.privatecatalog.v1beta1.Version> versions_ =
java.util.Collections.emptyList();
private void ensureVersionsIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
versions_ =
new java.util.ArrayList<com.google.cloud.privatecatalog.v1beta1.Version>(versions_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.privatecatalog.v1beta1.Version,
com.google.cloud.privatecatalog.v1beta1.Version.Builder,
com.google.cloud.privatecatalog.v1beta1.VersionOrBuilder>
versionsBuilder_;
/**
*
*
* <pre>
* The `Version` resources computed from the resource context.
* </pre>
*
* <code>repeated .google.cloud.privatecatalog.v1beta1.Version versions = 1;</code>
*/
public java.util.List<com.google.cloud.privatecatalog.v1beta1.Version> getVersionsList() {
if (versionsBuilder_ == null) {
return java.util.Collections.unmodifiableList(versions_);
} else {
return versionsBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* The `Version` resources computed from the resource context.
* </pre>
*
* <code>repeated .google.cloud.privatecatalog.v1beta1.Version versions = 1;</code>
*/
public int getVersionsCount() {
if (versionsBuilder_ == null) {
return versions_.size();
} else {
return versionsBuilder_.getCount();
}
}
/**
*
*
* <pre>
* The `Version` resources computed from the resource context.
* </pre>
*
* <code>repeated .google.cloud.privatecatalog.v1beta1.Version versions = 1;</code>
*/
public com.google.cloud.privatecatalog.v1beta1.Version getVersions(int index) {
if (versionsBuilder_ == null) {
return versions_.get(index);
} else {
return versionsBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* The `Version` resources computed from the resource context.
* </pre>
*
* <code>repeated .google.cloud.privatecatalog.v1beta1.Version versions = 1;</code>
*/
public Builder setVersions(int index, com.google.cloud.privatecatalog.v1beta1.Version value) {
if (versionsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureVersionsIsMutable();
versions_.set(index, value);
onChanged();
} else {
versionsBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The `Version` resources computed from the resource context.
* </pre>
*
* <code>repeated .google.cloud.privatecatalog.v1beta1.Version versions = 1;</code>
*/
public Builder setVersions(
int index, com.google.cloud.privatecatalog.v1beta1.Version.Builder builderForValue) {
if (versionsBuilder_ == null) {
ensureVersionsIsMutable();
versions_.set(index, builderForValue.build());
onChanged();
} else {
versionsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The `Version` resources computed from the resource context.
* </pre>
*
* <code>repeated .google.cloud.privatecatalog.v1beta1.Version versions = 1;</code>
*/
public Builder addVersions(com.google.cloud.privatecatalog.v1beta1.Version value) {
if (versionsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureVersionsIsMutable();
versions_.add(value);
onChanged();
} else {
versionsBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* The `Version` resources computed from the resource context.
* </pre>
*
* <code>repeated .google.cloud.privatecatalog.v1beta1.Version versions = 1;</code>
*/
public Builder addVersions(int index, com.google.cloud.privatecatalog.v1beta1.Version value) {
if (versionsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureVersionsIsMutable();
versions_.add(index, value);
onChanged();
} else {
versionsBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The `Version` resources computed from the resource context.
* </pre>
*
* <code>repeated .google.cloud.privatecatalog.v1beta1.Version versions = 1;</code>
*/
    // NOTE(review): protoc-generated builder code (see @@protoc_insertion_point markers below);
    // do not hand-edit — regeneration from the .proto will overwrite any changes.
    // Each repeated-field mutator below follows the generated dual-path pattern: operate on the
    // local versions_ list until getVersionsFieldBuilder() is first called, after which all
    // operations are delegated to versionsBuilder_.
    public Builder addVersions(
        com.google.cloud.privatecatalog.v1beta1.Version.Builder builderForValue) {
      if (versionsBuilder_ == null) {
        ensureVersionsIsMutable();
        versions_.add(builderForValue.build());
        onChanged();
      } else {
        versionsBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The `Version` resources computed from the resource context.
     * </pre>
     *
     * <code>repeated .google.cloud.privatecatalog.v1beta1.Version versions = 1;</code>
     */
    public Builder addVersions(
        int index, com.google.cloud.privatecatalog.v1beta1.Version.Builder builderForValue) {
      if (versionsBuilder_ == null) {
        ensureVersionsIsMutable();
        versions_.add(index, builderForValue.build());
        onChanged();
      } else {
        versionsBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The `Version` resources computed from the resource context.
     * </pre>
     *
     * <code>repeated .google.cloud.privatecatalog.v1beta1.Version versions = 1;</code>
     */
    public Builder addAllVersions(
        java.lang.Iterable<? extends com.google.cloud.privatecatalog.v1beta1.Version> values) {
      if (versionsBuilder_ == null) {
        ensureVersionsIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, versions_);
        onChanged();
      } else {
        versionsBuilder_.addAllMessages(values);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The `Version` resources computed from the resource context.
     * </pre>
     *
     * <code>repeated .google.cloud.privatecatalog.v1beta1.Version versions = 1;</code>
     */
    public Builder clearVersions() {
      if (versionsBuilder_ == null) {
        versions_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        versionsBuilder_.clear();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The `Version` resources computed from the resource context.
     * </pre>
     *
     * <code>repeated .google.cloud.privatecatalog.v1beta1.Version versions = 1;</code>
     */
    public Builder removeVersions(int index) {
      if (versionsBuilder_ == null) {
        ensureVersionsIsMutable();
        versions_.remove(index);
        onChanged();
      } else {
        versionsBuilder_.remove(index);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The `Version` resources computed from the resource context.
     * </pre>
     *
     * <code>repeated .google.cloud.privatecatalog.v1beta1.Version versions = 1;</code>
     */
    public com.google.cloud.privatecatalog.v1beta1.Version.Builder getVersionsBuilder(int index) {
      return getVersionsFieldBuilder().getBuilder(index);
    }
    /**
     *
     *
     * <pre>
     * The `Version` resources computed from the resource context.
     * </pre>
     *
     * <code>repeated .google.cloud.privatecatalog.v1beta1.Version versions = 1;</code>
     */
    public com.google.cloud.privatecatalog.v1beta1.VersionOrBuilder getVersionsOrBuilder(
        int index) {
      if (versionsBuilder_ == null) {
        return versions_.get(index);
      } else {
        return versionsBuilder_.getMessageOrBuilder(index);
      }
    }
    /**
     *
     *
     * <pre>
     * The `Version` resources computed from the resource context.
     * </pre>
     *
     * <code>repeated .google.cloud.privatecatalog.v1beta1.Version versions = 1;</code>
     */
    public java.util.List<? extends com.google.cloud.privatecatalog.v1beta1.VersionOrBuilder>
        getVersionsOrBuilderList() {
      if (versionsBuilder_ != null) {
        return versionsBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(versions_);
      }
    }
    /**
     *
     *
     * <pre>
     * The `Version` resources computed from the resource context.
     * </pre>
     *
     * <code>repeated .google.cloud.privatecatalog.v1beta1.Version versions = 1;</code>
     */
    public com.google.cloud.privatecatalog.v1beta1.Version.Builder addVersionsBuilder() {
      return getVersionsFieldBuilder()
          .addBuilder(com.google.cloud.privatecatalog.v1beta1.Version.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * The `Version` resources computed from the resource context.
     * </pre>
     *
     * <code>repeated .google.cloud.privatecatalog.v1beta1.Version versions = 1;</code>
     */
    public com.google.cloud.privatecatalog.v1beta1.Version.Builder addVersionsBuilder(int index) {
      return getVersionsFieldBuilder()
          .addBuilder(index, com.google.cloud.privatecatalog.v1beta1.Version.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * The `Version` resources computed from the resource context.
     * </pre>
     *
     * <code>repeated .google.cloud.privatecatalog.v1beta1.Version versions = 1;</code>
     */
    public java.util.List<com.google.cloud.privatecatalog.v1beta1.Version.Builder>
        getVersionsBuilderList() {
      return getVersionsFieldBuilder().getBuilderList();
    }
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.privatecatalog.v1beta1.Version,
            com.google.cloud.privatecatalog.v1beta1.Version.Builder,
            com.google.cloud.privatecatalog.v1beta1.VersionOrBuilder>
        versionsBuilder_;
    // Lazily creates the RepeatedFieldBuilderV3 on first use; ownership of the versions_ list is
    // transferred to the field builder and versions_ is nulled so only one copy is authoritative.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.privatecatalog.v1beta1.Version,
            com.google.cloud.privatecatalog.v1beta1.Version.Builder,
            com.google.cloud.privatecatalog.v1beta1.VersionOrBuilder>
        getVersionsFieldBuilder() {
      if (versionsBuilder_ == null) {
        versionsBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.cloud.privatecatalog.v1beta1.Version,
                com.google.cloud.privatecatalog.v1beta1.Version.Builder,
                com.google.cloud.privatecatalog.v1beta1.VersionOrBuilder>(
                versions_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
        versions_ = null;
      }
      return versionsBuilder_;
    }
    // Stored as String or ByteString; getters lazily convert and cache (standard generated pattern).
    private java.lang.Object nextPageToken_ = "";
    /**
     *
     *
     * <pre>
     * A pagination token returned from a previous call to SearchVersions that
     * indicates from where the listing should continue.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The nextPageToken.
     */
    public java.lang.String getNextPageToken() {
      java.lang.Object ref = nextPageToken_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        nextPageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * A pagination token returned from a previous call to SearchVersions that
     * indicates from where the listing should continue.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The bytes for nextPageToken.
     */
    public com.google.protobuf.ByteString getNextPageTokenBytes() {
      java.lang.Object ref = nextPageToken_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        nextPageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * A pagination token returned from a previous call to SearchVersions that
     * indicates from where the listing should continue.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * A pagination token returned from a previous call to SearchVersions that
     * indicates from where the listing should continue.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearNextPageToken() {
      nextPageToken_ = getDefaultInstance().getNextPageToken();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * A pagination token returned from a previous call to SearchVersions that
     * indicates from where the listing should continue.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The bytes for nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.cloud.privatecatalog.v1beta1.SearchVersionsResponse)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.privatecatalog.v1beta1.SearchVersionsResponse)
  // NOTE(review): protoc-generated singleton/parser boilerplate; do not hand-edit.
  private static final com.google.cloud.privatecatalog.v1beta1.SearchVersionsResponse
      DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.privatecatalog.v1beta1.SearchVersionsResponse();
  }
  public static com.google.cloud.privatecatalog.v1beta1.SearchVersionsResponse
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Parser delegates to Builder.mergeFrom and attaches the partially-built message to any
  // parse failure so callers can inspect what was decoded before the error.
  private static final com.google.protobuf.Parser<SearchVersionsResponse> PARSER =
      new com.google.protobuf.AbstractParser<SearchVersionsResponse>() {
        @java.lang.Override
        public SearchVersionsResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<SearchVersionsResponse> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<SearchVersionsResponse> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.privatecatalog.v1beta1.SearchVersionsResponse
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/chronicle/v1/rule.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.chronicle.v1;
/**
*
*
* <pre>
* Response message for ListRuleDeployments.
* </pre>
*
* Protobuf type {@code google.cloud.chronicle.v1.ListRuleDeploymentsResponse}
*/
public final class ListRuleDeploymentsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.chronicle.v1.ListRuleDeploymentsResponse)
ListRuleDeploymentsResponseOrBuilder {
  // NOTE(review): protoc-generated message internals (header says DO NOT EDIT);
  // hand edits will be lost when the file is regenerated from rule.proto.
  private static final long serialVersionUID = 0L;
  // Use ListRuleDeploymentsResponse.newBuilder() to construct.
  private ListRuleDeploymentsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private ListRuleDeploymentsResponse() {
    ruleDeployments_ = java.util.Collections.emptyList();
    nextPageToken_ = "";
  }
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ListRuleDeploymentsResponse();
  }
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.chronicle.v1.RuleProto
        .internal_static_google_cloud_chronicle_v1_ListRuleDeploymentsResponse_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.chronicle.v1.RuleProto
        .internal_static_google_cloud_chronicle_v1_ListRuleDeploymentsResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.chronicle.v1.ListRuleDeploymentsResponse.class,
            com.google.cloud.chronicle.v1.ListRuleDeploymentsResponse.Builder.class);
  }
  public static final int RULE_DEPLOYMENTS_FIELD_NUMBER = 1;
  @SuppressWarnings("serial")
  private java.util.List<com.google.cloud.chronicle.v1.RuleDeployment> ruleDeployments_;
  /**
   *
   *
   * <pre>
   * The rule deployments from all rules.
   * </pre>
   *
   * <code>repeated .google.cloud.chronicle.v1.RuleDeployment rule_deployments = 1;</code>
   */
  @java.lang.Override
  public java.util.List<com.google.cloud.chronicle.v1.RuleDeployment> getRuleDeploymentsList() {
    return ruleDeployments_;
  }
  /**
   *
   *
   * <pre>
   * The rule deployments from all rules.
   * </pre>
   *
   * <code>repeated .google.cloud.chronicle.v1.RuleDeployment rule_deployments = 1;</code>
   */
  @java.lang.Override
  public java.util.List<? extends com.google.cloud.chronicle.v1.RuleDeploymentOrBuilder>
      getRuleDeploymentsOrBuilderList() {
    return ruleDeployments_;
  }
  /**
   *
   *
   * <pre>
   * The rule deployments from all rules.
   * </pre>
   *
   * <code>repeated .google.cloud.chronicle.v1.RuleDeployment rule_deployments = 1;</code>
   */
  @java.lang.Override
  public int getRuleDeploymentsCount() {
    return ruleDeployments_.size();
  }
  /**
   *
   *
   * <pre>
   * The rule deployments from all rules.
   * </pre>
   *
   * <code>repeated .google.cloud.chronicle.v1.RuleDeployment rule_deployments = 1;</code>
   */
  @java.lang.Override
  public com.google.cloud.chronicle.v1.RuleDeployment getRuleDeployments(int index) {
    return ruleDeployments_.get(index);
  }
  /**
   *
   *
   * <pre>
   * The rule deployments from all rules.
   * </pre>
   *
   * <code>repeated .google.cloud.chronicle.v1.RuleDeployment rule_deployments = 1;</code>
   */
  @java.lang.Override
  public com.google.cloud.chronicle.v1.RuleDeploymentOrBuilder getRuleDeploymentsOrBuilder(
      int index) {
    return ruleDeployments_.get(index);
  }
  public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
  // Stored as String or ByteString; getters lazily convert and cache (standard generated pattern).
  @SuppressWarnings("serial")
  private volatile java.lang.Object nextPageToken_ = "";
  /**
   *
   *
   * <pre>
   * A token, which can be sent as `page_token` to retrieve the next page.
   * If this field is omitted, there are no subsequent pages.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The nextPageToken.
   */
  @java.lang.Override
  public java.lang.String getNextPageToken() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      nextPageToken_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * A token, which can be sent as `page_token` to retrieve the next page.
   * If this field is omitted, there are no subsequent pages.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The bytes for nextPageToken.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getNextPageTokenBytes() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      nextPageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // -1 = not computed, 0 = not initialized, 1 = initialized (generated memoization convention).
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    for (int i = 0; i < ruleDeployments_.size(); i++) {
      output.writeMessage(1, ruleDeployments_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    for (int i = 0; i < ruleDeployments_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, ruleDeployments_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.chronicle.v1.ListRuleDeploymentsResponse)) {
      return super.equals(obj);
    }
    com.google.cloud.chronicle.v1.ListRuleDeploymentsResponse other =
        (com.google.cloud.chronicle.v1.ListRuleDeploymentsResponse) obj;
    if (!getRuleDeploymentsList().equals(other.getRuleDeploymentsList())) return false;
    if (!getNextPageToken().equals(other.getNextPageToken())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getRuleDeploymentsCount() > 0) {
      hash = (37 * hash) + RULE_DEPLOYMENTS_FIELD_NUMBER;
      hash = (53 * hash) + getRuleDeploymentsList().hashCode();
    }
    hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getNextPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parseFrom overloads; all delegate to PARSER / GeneratedMessageV3 helpers.
  public static com.google.cloud.chronicle.v1.ListRuleDeploymentsResponse parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.chronicle.v1.ListRuleDeploymentsResponse parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.chronicle.v1.ListRuleDeploymentsResponse parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.chronicle.v1.ListRuleDeploymentsResponse parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.chronicle.v1.ListRuleDeploymentsResponse parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.chronicle.v1.ListRuleDeploymentsResponse parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.chronicle.v1.ListRuleDeploymentsResponse parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.chronicle.v1.ListRuleDeploymentsResponse parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.chronicle.v1.ListRuleDeploymentsResponse parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.chronicle.v1.ListRuleDeploymentsResponse parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.chronicle.v1.ListRuleDeploymentsResponse parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.chronicle.v1.ListRuleDeploymentsResponse parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(
      com.google.cloud.chronicle.v1.ListRuleDeploymentsResponse prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Response message for ListRuleDeployments.
* </pre>
*
* Protobuf type {@code google.cloud.chronicle.v1.ListRuleDeploymentsResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.chronicle.v1.ListRuleDeploymentsResponse)
com.google.cloud.chronicle.v1.ListRuleDeploymentsResponseOrBuilder {
    // NOTE(review): protoc-generated Builder lifecycle methods; do not hand-edit.
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.chronicle.v1.RuleProto
          .internal_static_google_cloud_chronicle_v1_ListRuleDeploymentsResponse_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.chronicle.v1.RuleProto
          .internal_static_google_cloud_chronicle_v1_ListRuleDeploymentsResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.chronicle.v1.ListRuleDeploymentsResponse.class,
              com.google.cloud.chronicle.v1.ListRuleDeploymentsResponse.Builder.class);
    }
    // Construct using com.google.cloud.chronicle.v1.ListRuleDeploymentsResponse.newBuilder()
    private Builder() {}
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (ruleDeploymentsBuilder_ == null) {
        ruleDeployments_ = java.util.Collections.emptyList();
      } else {
        ruleDeployments_ = null;
        ruleDeploymentsBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      nextPageToken_ = "";
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.chronicle.v1.RuleProto
          .internal_static_google_cloud_chronicle_v1_ListRuleDeploymentsResponse_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.chronicle.v1.ListRuleDeploymentsResponse getDefaultInstanceForType() {
      return com.google.cloud.chronicle.v1.ListRuleDeploymentsResponse.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.cloud.chronicle.v1.ListRuleDeploymentsResponse build() {
      com.google.cloud.chronicle.v1.ListRuleDeploymentsResponse result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.chronicle.v1.ListRuleDeploymentsResponse buildPartial() {
      com.google.cloud.chronicle.v1.ListRuleDeploymentsResponse result =
          new com.google.cloud.chronicle.v1.ListRuleDeploymentsResponse(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Freezes the builder-owned list into an unmodifiable view before handing it to the message.
    private void buildPartialRepeatedFields(
        com.google.cloud.chronicle.v1.ListRuleDeploymentsResponse result) {
      if (ruleDeploymentsBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)) {
          ruleDeployments_ = java.util.Collections.unmodifiableList(ruleDeployments_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.ruleDeployments_ = ruleDeployments_;
      } else {
        result.ruleDeployments_ = ruleDeploymentsBuilder_.build();
      }
    }
    private void buildPartial0(com.google.cloud.chronicle.v1.ListRuleDeploymentsResponse result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.nextPageToken_ = nextPageToken_;
      }
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.chronicle.v1.ListRuleDeploymentsResponse) {
        return mergeFrom((com.google.cloud.chronicle.v1.ListRuleDeploymentsResponse) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    public Builder mergeFrom(com.google.cloud.chronicle.v1.ListRuleDeploymentsResponse other) {
      if (other == com.google.cloud.chronicle.v1.ListRuleDeploymentsResponse.getDefaultInstance())
        return this;
      if (ruleDeploymentsBuilder_ == null) {
        if (!other.ruleDeployments_.isEmpty()) {
          if (ruleDeployments_.isEmpty()) {
            ruleDeployments_ = other.ruleDeployments_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureRuleDeploymentsIsMutable();
            ruleDeployments_.addAll(other.ruleDeployments_);
          }
          onChanged();
        }
      } else {
        if (!other.ruleDeployments_.isEmpty()) {
          if (ruleDeploymentsBuilder_.isEmpty()) {
            ruleDeploymentsBuilder_.dispose();
            ruleDeploymentsBuilder_ = null;
            ruleDeployments_ = other.ruleDeployments_;
            bitField0_ = (bitField0_ & ~0x00000001);
            ruleDeploymentsBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getRuleDeploymentsFieldBuilder()
                    : null;
          } else {
            ruleDeploymentsBuilder_.addAllMessages(other.ruleDeployments_);
          }
        }
      }
      if (!other.getNextPageToken().isEmpty()) {
        nextPageToken_ = other.nextPageToken_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                com.google.cloud.chronicle.v1.RuleDeployment m =
                    input.readMessage(
                        com.google.cloud.chronicle.v1.RuleDeployment.parser(), extensionRegistry);
                if (ruleDeploymentsBuilder_ == null) {
                  ensureRuleDeploymentsIsMutable();
                  ruleDeployments_.add(m);
                } else {
                  ruleDeploymentsBuilder_.addMessage(m);
                }
                break;
              } // case 10
            case 18:
              {
                nextPageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // bit 0x1 = ruleDeployments_ list is builder-owned/mutable; bit 0x2 = nextPageToken_ set.
    private int bitField0_;
    private java.util.List<com.google.cloud.chronicle.v1.RuleDeployment> ruleDeployments_ =
        java.util.Collections.emptyList();
    // Copy-on-write: replaces the shared/immutable list with a private ArrayList before mutation.
    private void ensureRuleDeploymentsIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        ruleDeployments_ =
            new java.util.ArrayList<com.google.cloud.chronicle.v1.RuleDeployment>(ruleDeployments_);
        bitField0_ |= 0x00000001;
      }
    }
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.chronicle.v1.RuleDeployment,
            com.google.cloud.chronicle.v1.RuleDeployment.Builder,
            com.google.cloud.chronicle.v1.RuleDeploymentOrBuilder>
        ruleDeploymentsBuilder_;
    /**
     *
     *
     * <pre>
     * The rule deployments from all rules.
     * </pre>
     *
     * <code>repeated .google.cloud.chronicle.v1.RuleDeployment rule_deployments = 1;</code>
     */
    public java.util.List<com.google.cloud.chronicle.v1.RuleDeployment> getRuleDeploymentsList() {
      if (ruleDeploymentsBuilder_ == null) {
        return java.util.Collections.unmodifiableList(ruleDeployments_);
      } else {
        return ruleDeploymentsBuilder_.getMessageList();
      }
    }
    /**
     *
     *
     * <pre>
     * The rule deployments from all rules.
     * </pre>
     *
     * <code>repeated .google.cloud.chronicle.v1.RuleDeployment rule_deployments = 1;</code>
     */
    public int getRuleDeploymentsCount() {
      if (ruleDeploymentsBuilder_ == null) {
        return ruleDeployments_.size();
      } else {
        return ruleDeploymentsBuilder_.getCount();
      }
    }
    /**
     *
     *
     * <pre>
     * The rule deployments from all rules.
     * </pre>
     *
     * <code>repeated .google.cloud.chronicle.v1.RuleDeployment rule_deployments = 1;</code>
     */
    public com.google.cloud.chronicle.v1.RuleDeployment getRuleDeployments(int index) {
      if (ruleDeploymentsBuilder_ == null) {
        return ruleDeployments_.get(index);
      } else {
        return ruleDeploymentsBuilder_.getMessage(index);
      }
    }
    /**
     *
     *
     * <pre>
     * The rule deployments from all rules.
     * </pre>
     *
     * <code>repeated .google.cloud.chronicle.v1.RuleDeployment rule_deployments = 1;</code>
     */
    public Builder setRuleDeployments(
        int index, com.google.cloud.chronicle.v1.RuleDeployment value) {
      if (ruleDeploymentsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureRuleDeploymentsIsMutable();
        ruleDeployments_.set(index, value);
        onChanged();
      } else {
        ruleDeploymentsBuilder_.setMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The rule deployments from all rules.
     * </pre>
     *
     * <code>repeated .google.cloud.chronicle.v1.RuleDeployment rule_deployments = 1;</code>
     */
    public Builder setRuleDeployments(
        int index, com.google.cloud.chronicle.v1.RuleDeployment.Builder builderForValue) {
      if (ruleDeploymentsBuilder_ == null) {
        ensureRuleDeploymentsIsMutable();
        ruleDeployments_.set(index, builderForValue.build());
        onChanged();
      } else {
        ruleDeploymentsBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The rule deployments from all rules.
     * </pre>
     *
     * <code>repeated .google.cloud.chronicle.v1.RuleDeployment rule_deployments = 1;</code>
     */
    public Builder addRuleDeployments(com.google.cloud.chronicle.v1.RuleDeployment value) {
      if (ruleDeploymentsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureRuleDeploymentsIsMutable();
        ruleDeployments_.add(value);
        onChanged();
      } else {
        ruleDeploymentsBuilder_.addMessage(value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The rule deployments from all rules.
     * </pre>
     *
     * <code>repeated .google.cloud.chronicle.v1.RuleDeployment rule_deployments = 1;</code>
     */
    public Builder addRuleDeployments(
        int index, com.google.cloud.chronicle.v1.RuleDeployment value) {
      if (ruleDeploymentsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureRuleDeploymentsIsMutable();
        ruleDeployments_.add(index, value);
        onChanged();
      } else {
        ruleDeploymentsBuilder_.addMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The rule deployments from all rules.
     * </pre>
     *
     * <code>repeated .google.cloud.chronicle.v1.RuleDeployment rule_deployments = 1;</code>
     */
    public Builder addRuleDeployments(
        com.google.cloud.chronicle.v1.RuleDeployment.Builder builderForValue) {
      if (ruleDeploymentsBuilder_ == null) {
        ensureRuleDeploymentsIsMutable();
        ruleDeployments_.add(builderForValue.build());
        onChanged();
      } else {
        ruleDeploymentsBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The rule deployments from all rules.
     * </pre>
     *
     * <code>repeated .google.cloud.chronicle.v1.RuleDeployment rule_deployments = 1;</code>
     */
    public Builder addRuleDeployments(
        int index, com.google.cloud.chronicle.v1.RuleDeployment.Builder builderForValue) {
      if (ruleDeploymentsBuilder_ == null) {
        ensureRuleDeploymentsIsMutable();
        ruleDeployments_.add(index, builderForValue.build());
        onChanged();
      } else {
        ruleDeploymentsBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The rule deployments from all rules.
     * </pre>
     *
     * <code>repeated .google.cloud.chronicle.v1.RuleDeployment rule_deployments = 1;</code>
     */
    public Builder addAllRuleDeployments(
        java.lang.Iterable<? extends com.google.cloud.chronicle.v1.RuleDeployment> values) {
      if (ruleDeploymentsBuilder_ == null) {
        ensureRuleDeploymentsIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, ruleDeployments_);
        onChanged();
      } else {
        ruleDeploymentsBuilder_.addAllMessages(values);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The rule deployments from all rules.
     * </pre>
     *
     * <code>repeated .google.cloud.chronicle.v1.RuleDeployment rule_deployments = 1;</code>
     */
    public Builder clearRuleDeployments() {
      if (ruleDeploymentsBuilder_ == null) {
        ruleDeployments_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        ruleDeploymentsBuilder_.clear();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The rule deployments from all rules.
     * </pre>
     *
     * <code>repeated .google.cloud.chronicle.v1.RuleDeployment rule_deployments = 1;</code>
     */
    public Builder removeRuleDeployments(int index) {
      if (ruleDeploymentsBuilder_ == null) {
        ensureRuleDeploymentsIsMutable();
        ruleDeployments_.remove(index);
        onChanged();
      } else {
        ruleDeploymentsBuilder_.remove(index);
      }
      return this;
    }
/**
*
*
* <pre>
* The rule deployments from all rules.
* </pre>
*
* <code>repeated .google.cloud.chronicle.v1.RuleDeployment rule_deployments = 1;</code>
*/
    public com.google.cloud.chronicle.v1.RuleDeployment.Builder getRuleDeploymentsBuilder(
        int index) {
      // Forces creation of the field builder, switching the field into builder-backed mode.
      return getRuleDeploymentsFieldBuilder().getBuilder(index);
    }
/**
*
*
* <pre>
* The rule deployments from all rules.
* </pre>
*
* <code>repeated .google.cloud.chronicle.v1.RuleDeployment rule_deployments = 1;</code>
*/
public com.google.cloud.chronicle.v1.RuleDeploymentOrBuilder getRuleDeploymentsOrBuilder(
int index) {
if (ruleDeploymentsBuilder_ == null) {
return ruleDeployments_.get(index);
} else {
return ruleDeploymentsBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* The rule deployments from all rules.
* </pre>
*
* <code>repeated .google.cloud.chronicle.v1.RuleDeployment rule_deployments = 1;</code>
*/
public java.util.List<? extends com.google.cloud.chronicle.v1.RuleDeploymentOrBuilder>
getRuleDeploymentsOrBuilderList() {
if (ruleDeploymentsBuilder_ != null) {
return ruleDeploymentsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(ruleDeployments_);
}
}
/**
*
*
* <pre>
* The rule deployments from all rules.
* </pre>
*
* <code>repeated .google.cloud.chronicle.v1.RuleDeployment rule_deployments = 1;</code>
*/
    public com.google.cloud.chronicle.v1.RuleDeployment.Builder addRuleDeploymentsBuilder() {
      // Appends a default-initialized element and returns its builder for in-place editing.
      return getRuleDeploymentsFieldBuilder()
          .addBuilder(com.google.cloud.chronicle.v1.RuleDeployment.getDefaultInstance());
    }
/**
*
*
* <pre>
* The rule deployments from all rules.
* </pre>
*
* <code>repeated .google.cloud.chronicle.v1.RuleDeployment rule_deployments = 1;</code>
*/
    public com.google.cloud.chronicle.v1.RuleDeployment.Builder addRuleDeploymentsBuilder(
        int index) {
      // Inserts a default-initialized element at `index` and returns its builder.
      return getRuleDeploymentsFieldBuilder()
          .addBuilder(index, com.google.cloud.chronicle.v1.RuleDeployment.getDefaultInstance());
    }
/**
*
*
* <pre>
* The rule deployments from all rules.
* </pre>
*
* <code>repeated .google.cloud.chronicle.v1.RuleDeployment rule_deployments = 1;</code>
*/
    public java.util.List<com.google.cloud.chronicle.v1.RuleDeployment.Builder>
        getRuleDeploymentsBuilderList() {
      // Switches the field into builder-backed mode and exposes per-element builders.
      return getRuleDeploymentsFieldBuilder().getBuilderList();
    }
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.chronicle.v1.RuleDeployment,
            com.google.cloud.chronicle.v1.RuleDeployment.Builder,
            com.google.cloud.chronicle.v1.RuleDeploymentOrBuilder>
        getRuleDeploymentsFieldBuilder() {
      if (ruleDeploymentsBuilder_ == null) {
        // Lazily create the field builder; it takes over the current list.
        // The bit-0x1 flag tells it whether the list is an owned mutable copy.
        ruleDeploymentsBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.cloud.chronicle.v1.RuleDeployment,
                com.google.cloud.chronicle.v1.RuleDeployment.Builder,
                com.google.cloud.chronicle.v1.RuleDeploymentOrBuilder>(
                ruleDeployments_,
                ((bitField0_ & 0x00000001) != 0),
                getParentForChildren(),
                isClean());
        // From here on the field builder is the single source of truth.
        ruleDeployments_ = null;
      }
      return ruleDeploymentsBuilder_;
    }
    // Holds either a String or a ByteString; accessors convert lazily and cache the result.
    private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
    public Builder setNextPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      nextPageToken_ = value;
      // Bit 0x2 marks next_page_token (field 2) as explicitly set on this builder.
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
    public Builder clearNextPageToken() {
      // Restore the proto default ("") and drop the builder's set-bit for field 2.
      nextPageToken_ = getDefaultInstance().getNextPageToken();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
    public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      // Rejects byte sequences that are not valid UTF-8 (proto3 string contract).
      checkByteStringIsUtf8(value);
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      // Pure delegation; unknown fields are preserved verbatim for round-tripping.
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      // Pure delegation to the base builder implementation.
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.chronicle.v1.ListRuleDeploymentsResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.chronicle.v1.ListRuleDeploymentsResponse)
  // Shared immutable default instance; also serves as the prototype for newBuilder().
  private static final com.google.cloud.chronicle.v1.ListRuleDeploymentsResponse DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.chronicle.v1.ListRuleDeploymentsResponse();
  }

  public static com.google.cloud.chronicle.v1.ListRuleDeploymentsResponse getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<ListRuleDeploymentsResponse> PARSER =
      new com.google.protobuf.AbstractParser<ListRuleDeploymentsResponse>() {
        @java.lang.Override
        public ListRuleDeploymentsResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            // Attach whatever was parsed so far so callers can inspect partial data.
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O failures in the protobuf-specific exception type.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<ListRuleDeploymentsResponse> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<ListRuleDeploymentsResponse> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.chronicle.v1.ListRuleDeploymentsResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 37,716 | java-analytics-admin/proto-google-analytics-admin-v1alpha/src/main/java/com/google/analytics/admin/v1alpha/EventMapping.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/analytics/admin/v1alpha/resources.proto
// Protobuf Java Version: 3.25.8
package com.google.analytics.admin.v1alpha;
/**
*
*
* <pre>
* Event setting conditions to match an event.
* </pre>
*
* Protobuf type {@code google.analytics.admin.v1alpha.EventMapping}
*/
public final class EventMapping extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.analytics.admin.v1alpha.EventMapping)
EventMappingOrBuilder {
private static final long serialVersionUID = 0L;
// Use EventMapping.newBuilder() to construct.
  // Construction goes through Builder / parsing only; no public constructor.
  private EventMapping(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  private EventMapping() {
    eventName_ = "";
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    // Used by the protobuf runtime to create fresh instances reflectively.
    return new EventMapping();
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.analytics.admin.v1alpha.ResourcesProto
        .internal_static_google_analytics_admin_v1alpha_EventMapping_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.analytics.admin.v1alpha.ResourcesProto
        .internal_static_google_analytics_admin_v1alpha_EventMapping_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.analytics.admin.v1alpha.EventMapping.class,
            com.google.analytics.admin.v1alpha.EventMapping.Builder.class);
  }

  // Presence bits for the four proto3 `optional` scalar fields (0x1..0x8).
  private int bitField0_;
  public static final int EVENT_NAME_FIELD_NUMBER = 1;

  // Holds either a String or a ByteString; converted lazily by the accessors.
  @SuppressWarnings("serial")
  private volatile java.lang.Object eventName_ = "";
/**
*
*
* <pre>
* Required. Name of the Google Analytics event. It must always be set.
* The max allowed display name length is 40 UTF-16 code units.
* </pre>
*
* <code>string event_name = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The eventName.
*/
@java.lang.Override
public java.lang.String getEventName() {
java.lang.Object ref = eventName_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
eventName_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. Name of the Google Analytics event. It must always be set.
* The max allowed display name length is 40 UTF-16 code units.
* </pre>
*
* <code>string event_name = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for eventName.
*/
@java.lang.Override
public com.google.protobuf.ByteString getEventNameBytes() {
java.lang.Object ref = eventName_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
eventName_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int MIN_EVENT_COUNT_FIELD_NUMBER = 2;
private long minEventCount_ = 0L;
/**
*
*
* <pre>
* At least one of the following four min/max values must be set. The
* values set will be ANDed together to qualify an event.
* The minimum number of times the event occurred. If not set, minimum event
* count won't be checked.
* </pre>
*
* <code>optional int64 min_event_count = 2;</code>
*
* @return Whether the minEventCount field is set.
*/
  @java.lang.Override
  public boolean hasMinEventCount() {
    // Presence bit 0x1: set only when min_event_count was explicitly assigned.
    return ((bitField0_ & 0x00000001) != 0);
  }
/**
*
*
* <pre>
* At least one of the following four min/max values must be set. The
* values set will be ANDed together to qualify an event.
* The minimum number of times the event occurred. If not set, minimum event
* count won't be checked.
* </pre>
*
* <code>optional int64 min_event_count = 2;</code>
*
* @return The minEventCount.
*/
  @java.lang.Override
  public long getMinEventCount() {
    // Returns 0L when unset; use hasMinEventCount() to distinguish.
    return minEventCount_;
  }
public static final int MAX_EVENT_COUNT_FIELD_NUMBER = 3;
private long maxEventCount_ = 0L;
/**
*
*
* <pre>
* The maximum number of times the event occurred. If not set, maximum event
* count won't be checked.
* </pre>
*
* <code>optional int64 max_event_count = 3;</code>
*
* @return Whether the maxEventCount field is set.
*/
  @java.lang.Override
  public boolean hasMaxEventCount() {
    // Presence bit 0x2 for max_event_count.
    return ((bitField0_ & 0x00000002) != 0);
  }
/**
*
*
* <pre>
* The maximum number of times the event occurred. If not set, maximum event
* count won't be checked.
* </pre>
*
* <code>optional int64 max_event_count = 3;</code>
*
* @return The maxEventCount.
*/
  @java.lang.Override
  public long getMaxEventCount() {
    // Returns 0L when unset; use hasMaxEventCount() to distinguish.
    return maxEventCount_;
  }
public static final int MIN_EVENT_VALUE_FIELD_NUMBER = 4;
private double minEventValue_ = 0D;
/**
*
*
* <pre>
* The minimum revenue generated due to the event. Revenue currency will be
* defined at the property level. If not set, minimum event value won't be
* checked.
* </pre>
*
* <code>optional double min_event_value = 4;</code>
*
* @return Whether the minEventValue field is set.
*/
  @java.lang.Override
  public boolean hasMinEventValue() {
    // Presence bit 0x4 for min_event_value.
    return ((bitField0_ & 0x00000004) != 0);
  }
/**
*
*
* <pre>
* The minimum revenue generated due to the event. Revenue currency will be
* defined at the property level. If not set, minimum event value won't be
* checked.
* </pre>
*
* <code>optional double min_event_value = 4;</code>
*
* @return The minEventValue.
*/
  @java.lang.Override
  public double getMinEventValue() {
    // Returns 0.0 when unset; use hasMinEventValue() to distinguish.
    return minEventValue_;
  }
public static final int MAX_EVENT_VALUE_FIELD_NUMBER = 5;
private double maxEventValue_ = 0D;
/**
*
*
* <pre>
* The maximum revenue generated due to the event. Revenue currency will be
* defined at the property level. If not set, maximum event value won't be
* checked.
* </pre>
*
* <code>optional double max_event_value = 5;</code>
*
* @return Whether the maxEventValue field is set.
*/
  @java.lang.Override
  public boolean hasMaxEventValue() {
    // Presence bit 0x8 for max_event_value.
    return ((bitField0_ & 0x00000008) != 0);
  }
/**
*
*
* <pre>
* The maximum revenue generated due to the event. Revenue currency will be
* defined at the property level. If not set, maximum event value won't be
* checked.
* </pre>
*
* <code>optional double max_event_value = 5;</code>
*
* @return The maxEventValue.
*/
  @java.lang.Override
  public double getMaxEventValue() {
    // Returns 0.0 when unset; use hasMaxEventValue() to distinguish.
    return maxEventValue_;
  }
  // -1 = not computed, 0 = false, 1 = true; memoizes isInitialized().
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // No required fields in this message, so it is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Field 1 (event_name) is written only when non-empty (proto3 default elision).
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(eventName_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, eventName_);
    }
    // The optional scalars are written whenever their presence bit is set,
    // even if they equal the default value.
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeInt64(2, minEventCount_);
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      output.writeInt64(3, maxEventCount_);
    }
    if (((bitField0_ & 0x00000004) != 0)) {
      output.writeDouble(4, minEventValue_);
    }
    if (((bitField0_ & 0x00000008) != 0)) {
      output.writeDouble(5, maxEventValue_);
    }
    getUnknownFields().writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    // Memoized; -1 means not yet computed.
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(eventName_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, eventName_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeInt64Size(2, minEventCount_);
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeInt64Size(3, maxEventCount_);
    }
    if (((bitField0_ & 0x00000004) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeDoubleSize(4, minEventValue_);
    }
    if (((bitField0_ & 0x00000008) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeDoubleSize(5, maxEventValue_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.analytics.admin.v1alpha.EventMapping)) {
      return super.equals(obj);
    }
    com.google.analytics.admin.v1alpha.EventMapping other =
        (com.google.analytics.admin.v1alpha.EventMapping) obj;

    if (!getEventName().equals(other.getEventName())) return false;
    // For each optional field: presence must match, and values only compared when present.
    if (hasMinEventCount() != other.hasMinEventCount()) return false;
    if (hasMinEventCount()) {
      if (getMinEventCount() != other.getMinEventCount()) return false;
    }
    if (hasMaxEventCount() != other.hasMaxEventCount()) return false;
    if (hasMaxEventCount()) {
      if (getMaxEventCount() != other.getMaxEventCount()) return false;
    }
    if (hasMinEventValue() != other.hasMinEventValue()) return false;
    if (hasMinEventValue()) {
      // Bit-level comparison: treats NaN==NaN as equal and 0.0 != -0.0,
      // matching serialized-form equality rather than IEEE `==`.
      if (java.lang.Double.doubleToLongBits(getMinEventValue())
          != java.lang.Double.doubleToLongBits(other.getMinEventValue())) return false;
    }
    if (hasMaxEventValue() != other.hasMaxEventValue()) return false;
    if (hasMaxEventValue()) {
      if (java.lang.Double.doubleToLongBits(getMaxEventValue())
          != java.lang.Double.doubleToLongBits(other.getMaxEventValue())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    // 0 doubles as the "not yet computed" sentinel.
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + EVENT_NAME_FIELD_NUMBER;
    hash = (53 * hash) + getEventName().hashCode();
    // Optional fields contribute to the hash only when present, consistent with equals().
    if (hasMinEventCount()) {
      hash = (37 * hash) + MIN_EVENT_COUNT_FIELD_NUMBER;
      hash = (53 * hash) + com.google.protobuf.Internal.hashLong(getMinEventCount());
    }
    if (hasMaxEventCount()) {
      hash = (37 * hash) + MAX_EVENT_COUNT_FIELD_NUMBER;
      hash = (53 * hash) + com.google.protobuf.Internal.hashLong(getMaxEventCount());
    }
    if (hasMinEventValue()) {
      hash = (37 * hash) + MIN_EVENT_VALUE_FIELD_NUMBER;
      hash =
          (53 * hash)
              + com.google.protobuf.Internal.hashLong(
                  java.lang.Double.doubleToLongBits(getMinEventValue()));
    }
    if (hasMaxEventValue()) {
      hash = (37 * hash) + MAX_EVENT_VALUE_FIELD_NUMBER;
      hash =
          (53 * hash)
              + com.google.protobuf.Internal.hashLong(
                  java.lang.Double.doubleToLongBits(getMaxEventValue()));
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Overloaded static factories covering every supported input source; all
  // delegate to the shared PARSER (InvalidProtocolBufferException on malformed data).
  public static com.google.analytics.admin.v1alpha.EventMapping parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.analytics.admin.v1alpha.EventMapping parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.analytics.admin.v1alpha.EventMapping parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.analytics.admin.v1alpha.EventMapping parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.analytics.admin.v1alpha.EventMapping parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.analytics.admin.v1alpha.EventMapping parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  // Stream variants surface transport failures as IOException instead.
  public static com.google.analytics.admin.v1alpha.EventMapping parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.analytics.admin.v1alpha.EventMapping parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  // Delimited variants read a varint length prefix before the message body.
  public static com.google.analytics.admin.v1alpha.EventMapping parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.analytics.admin.v1alpha.EventMapping parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.analytics.admin.v1alpha.EventMapping parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.analytics.admin.v1alpha.EventMapping parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    // Builders derived from the default instance start with all fields at defaults.
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(com.google.analytics.admin.v1alpha.EventMapping prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    // Avoid a redundant merge when called on the (all-defaults) default instance.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Event setting conditions to match an event.
* </pre>
*
* Protobuf type {@code google.analytics.admin.v1alpha.EventMapping}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.analytics.admin.v1alpha.EventMapping)
com.google.analytics.admin.v1alpha.EventMappingOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.analytics.admin.v1alpha.ResourcesProto
          .internal_static_google_analytics_admin_v1alpha_EventMapping_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.analytics.admin.v1alpha.ResourcesProto
          .internal_static_google_analytics_admin_v1alpha_EventMapping_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.analytics.admin.v1alpha.EventMapping.class,
              com.google.analytics.admin.v1alpha.EventMapping.Builder.class);
    }

    // Construct using com.google.analytics.admin.v1alpha.EventMapping.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    @java.lang.Override
    public Builder clear() {
      // Resets every field to its proto default and drops all presence bits.
      super.clear();
      bitField0_ = 0;
      eventName_ = "";
      minEventCount_ = 0L;
      maxEventCount_ = 0L;
      minEventValue_ = 0D;
      maxEventValue_ = 0D;
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.analytics.admin.v1alpha.ResourcesProto
          .internal_static_google_analytics_admin_v1alpha_EventMapping_descriptor;
    }

    @java.lang.Override
    public com.google.analytics.admin.v1alpha.EventMapping getDefaultInstanceForType() {
      return com.google.analytics.admin.v1alpha.EventMapping.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.analytics.admin.v1alpha.EventMapping build() {
      com.google.analytics.admin.v1alpha.EventMapping result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.analytics.admin.v1alpha.EventMapping buildPartial() {
      com.google.analytics.admin.v1alpha.EventMapping result =
          new com.google.analytics.admin.v1alpha.EventMapping(this);
      // Only copy fields when at least one builder bit is set.
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    private void buildPartial0(com.google.analytics.admin.v1alpha.EventMapping result) {
      int from_bitField0_ = bitField0_;
      // Builder bit 0x1 tracks event_name, which has no presence bit in the
      // message, so the remaining bits shift down by one when remapped below.
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.eventName_ = eventName_;
      }
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.minEventCount_ = minEventCount_;
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.maxEventCount_ = maxEventCount_;
        to_bitField0_ |= 0x00000002;
      }
      if (((from_bitField0_ & 0x00000008) != 0)) {
        result.minEventValue_ = minEventValue_;
        to_bitField0_ |= 0x00000004;
      }
      if (((from_bitField0_ & 0x00000010) != 0)) {
        result.maxEventValue_ = maxEventValue_;
        to_bitField0_ |= 0x00000008;
      }
      result.bitField0_ |= to_bitField0_;
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      // Fast path for same-type merges; otherwise fall back to reflective merging.
      if (other instanceof com.google.analytics.admin.v1alpha.EventMapping) {
        return mergeFrom((com.google.analytics.admin.v1alpha.EventMapping) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.analytics.admin.v1alpha.EventMapping other) {
      if (other == com.google.analytics.admin.v1alpha.EventMapping.getDefaultInstance())
        return this;
      // event_name is copied only when non-empty (no presence tracking in the message).
      if (!other.getEventName().isEmpty()) {
        eventName_ = other.eventName_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      // The optional scalars are copied whenever the source marks them present.
      if (other.hasMinEventCount()) {
        setMinEventCount(other.getMinEventCount());
      }
      if (other.hasMaxEventCount()) {
        setMaxEventCount(other.getMaxEventCount());
      }
      if (other.hasMinEventValue()) {
        setMinEventValue(other.getMinEventValue());
      }
      if (other.hasMaxEventValue()) {
        setMaxEventValue(other.getMaxEventValue());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      // No required fields: a builder in any state produces a valid message.
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          // Wire tags: tag = (field_number << 3) | wire_type.
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10: // field 1 (event_name), length-delimited
              {
                eventName_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 16: // field 2 (min_event_count), varint
              {
                minEventCount_ = input.readInt64();
                bitField0_ |= 0x00000002;
                break;
              } // case 16
            case 24: // field 3 (max_event_count), varint
              {
                maxEventCount_ = input.readInt64();
                bitField0_ |= 0x00000004;
                break;
              } // case 24
            case 33: // field 4 (min_event_value), 64-bit
              {
                minEventValue_ = input.readDouble();
                bitField0_ |= 0x00000008;
                break;
              } // case 33
            case 41: // field 5 (max_event_value), 64-bit
              {
                maxEventValue_ = input.readDouble();
                bitField0_ |= 0x00000010;
                break;
              } // case 41
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        // Propagate partial progress even when parsing aborts mid-stream.
        onChanged();
      } // finally
      return this;
    }

    // Builder-side presence bits: 0x1 = event_name dirty, 0x2..0x10 = optional scalars.
    private int bitField0_;

    // Holds either a String or a ByteString; converted lazily by the accessors.
    private java.lang.Object eventName_ = "";
/**
*
*
* <pre>
* Required. Name of the Google Analytics event. It must always be set.
* The max allowed display name length is 40 UTF-16 code units.
* </pre>
*
* <code>string event_name = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The eventName.
*/
public java.lang.String getEventName() {
java.lang.Object ref = eventName_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
eventName_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. Name of the Google Analytics event. It must always be set.
* The max allowed display name length is 40 UTF-16 code units.
* </pre>
*
* <code>string event_name = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for eventName.
*/
public com.google.protobuf.ByteString getEventNameBytes() {
java.lang.Object ref = eventName_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
eventName_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. Name of the Google Analytics event. It must always be set.
* The max allowed display name length is 40 UTF-16 code units.
* </pre>
*
* <code>string event_name = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The eventName to set.
* @return This builder for chaining.
*/
    public Builder setEventName(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      eventName_ = value;
      // Builder bit 0x1 marks event_name as modified.
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* Required. Name of the Google Analytics event. It must always be set.
* The max allowed display name length is 40 UTF-16 code units.
* </pre>
*
* <code>string event_name = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return This builder for chaining.
*/
    public Builder clearEventName() {
      // Reset to the proto default ("") and drop the builder's dirty bit.
      eventName_ = getDefaultInstance().getEventName();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* Required. Name of the Google Analytics event. It must always be set.
* The max allowed display name length is 40 UTF-16 code units.
* </pre>
*
* <code>string event_name = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The bytes for eventName to set.
* @return This builder for chaining.
*/
    public Builder setEventNameBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      // Rejects byte sequences that are not valid UTF-8 (proto3 string contract).
      checkByteStringIsUtf8(value);
      eventName_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    // Backing store for optional min_event_count; presence tracked by builder bit 0x2.
    private long minEventCount_;
/**
*
*
* <pre>
* At least one of the following four min/max values must be set. The
* values set will be ANDed together to qualify an event.
* The minimum number of times the event occurred. If not set, minimum event
* count won't be checked.
* </pre>
*
* <code>optional int64 min_event_count = 2;</code>
*
* @return Whether the minEventCount field is set.
*/
    @java.lang.Override
    public boolean hasMinEventCount() {
      // Builder presence bit 0x2 (bit 0x1 belongs to event_name).
      return ((bitField0_ & 0x00000002) != 0);
    }
/**
*
*
* <pre>
* At least one of the following four min/max values must be set. The
* values set will be ANDed together to qualify an event.
* The minimum number of times the event occurred. If not set, minimum event
* count won't be checked.
* </pre>
*
* <code>optional int64 min_event_count = 2;</code>
*
* @return The minEventCount.
*/
@java.lang.Override
public long getMinEventCount() {
return minEventCount_;
}
/**
*
*
* <pre>
* At least one of the following four min/max values must be set. The
* values set will be ANDed together to qualify an event.
* The minimum number of times the event occurred. If not set, minimum event
* count won't be checked.
* </pre>
*
* <code>optional int64 min_event_count = 2;</code>
*
* @param value The minEventCount to set.
* @return This builder for chaining.
*/
public Builder setMinEventCount(long value) {
minEventCount_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* At least one of the following four min/max values must be set. The
* values set will be ANDed together to qualify an event.
* The minimum number of times the event occurred. If not set, minimum event
* count won't be checked.
* </pre>
*
* <code>optional int64 min_event_count = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearMinEventCount() {
bitField0_ = (bitField0_ & ~0x00000002);
minEventCount_ = 0L;
onChanged();
return this;
}
private long maxEventCount_;
/**
*
*
* <pre>
* The maximum number of times the event occurred. If not set, maximum event
* count won't be checked.
* </pre>
*
* <code>optional int64 max_event_count = 3;</code>
*
* @return Whether the maxEventCount field is set.
*/
@java.lang.Override
public boolean hasMaxEventCount() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
*
*
* <pre>
* The maximum number of times the event occurred. If not set, maximum event
* count won't be checked.
* </pre>
*
* <code>optional int64 max_event_count = 3;</code>
*
* @return The maxEventCount.
*/
@java.lang.Override
public long getMaxEventCount() {
return maxEventCount_;
}
/**
*
*
* <pre>
* The maximum number of times the event occurred. If not set, maximum event
* count won't be checked.
* </pre>
*
* <code>optional int64 max_event_count = 3;</code>
*
* @param value The maxEventCount to set.
* @return This builder for chaining.
*/
public Builder setMaxEventCount(long value) {
maxEventCount_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* The maximum number of times the event occurred. If not set, maximum event
* count won't be checked.
* </pre>
*
* <code>optional int64 max_event_count = 3;</code>
*
* @return This builder for chaining.
*/
public Builder clearMaxEventCount() {
bitField0_ = (bitField0_ & ~0x00000004);
maxEventCount_ = 0L;
onChanged();
return this;
}
private double minEventValue_;
/**
*
*
* <pre>
* The minimum revenue generated due to the event. Revenue currency will be
* defined at the property level. If not set, minimum event value won't be
* checked.
* </pre>
*
* <code>optional double min_event_value = 4;</code>
*
* @return Whether the minEventValue field is set.
*/
@java.lang.Override
public boolean hasMinEventValue() {
return ((bitField0_ & 0x00000008) != 0);
}
/**
*
*
* <pre>
* The minimum revenue generated due to the event. Revenue currency will be
* defined at the property level. If not set, minimum event value won't be
* checked.
* </pre>
*
* <code>optional double min_event_value = 4;</code>
*
* @return The minEventValue.
*/
@java.lang.Override
public double getMinEventValue() {
return minEventValue_;
}
/**
*
*
* <pre>
* The minimum revenue generated due to the event. Revenue currency will be
* defined at the property level. If not set, minimum event value won't be
* checked.
* </pre>
*
* <code>optional double min_event_value = 4;</code>
*
* @param value The minEventValue to set.
* @return This builder for chaining.
*/
public Builder setMinEventValue(double value) {
minEventValue_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
*
*
* <pre>
* The minimum revenue generated due to the event. Revenue currency will be
* defined at the property level. If not set, minimum event value won't be
* checked.
* </pre>
*
* <code>optional double min_event_value = 4;</code>
*
* @return This builder for chaining.
*/
public Builder clearMinEventValue() {
bitField0_ = (bitField0_ & ~0x00000008);
minEventValue_ = 0D;
onChanged();
return this;
}
private double maxEventValue_;
/**
*
*
* <pre>
* The maximum revenue generated due to the event. Revenue currency will be
* defined at the property level. If not set, maximum event value won't be
* checked.
* </pre>
*
* <code>optional double max_event_value = 5;</code>
*
* @return Whether the maxEventValue field is set.
*/
@java.lang.Override
public boolean hasMaxEventValue() {
return ((bitField0_ & 0x00000010) != 0);
}
/**
*
*
* <pre>
* The maximum revenue generated due to the event. Revenue currency will be
* defined at the property level. If not set, maximum event value won't be
* checked.
* </pre>
*
* <code>optional double max_event_value = 5;</code>
*
* @return The maxEventValue.
*/
@java.lang.Override
public double getMaxEventValue() {
return maxEventValue_;
}
/**
*
*
* <pre>
* The maximum revenue generated due to the event. Revenue currency will be
* defined at the property level. If not set, maximum event value won't be
* checked.
* </pre>
*
* <code>optional double max_event_value = 5;</code>
*
* @param value The maxEventValue to set.
* @return This builder for chaining.
*/
public Builder setMaxEventValue(double value) {
maxEventValue_ = value;
bitField0_ |= 0x00000010;
onChanged();
return this;
}
/**
*
*
* <pre>
* The maximum revenue generated due to the event. Revenue currency will be
* defined at the property level. If not set, maximum event value won't be
* checked.
* </pre>
*
* <code>optional double max_event_value = 5;</code>
*
* @return This builder for chaining.
*/
public Builder clearMaxEventValue() {
bitField0_ = (bitField0_ & ~0x00000010);
maxEventValue_ = 0D;
onChanged();
return this;
}
    // NOTE(review): generated pass-throughs to the base Builder, emitted so the
    // final methods appear on this concrete Builder type. Do not hand-edit.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.analytics.admin.v1alpha.EventMapping)
}
// @@protoc_insertion_point(class_scope:google.analytics.admin.v1alpha.EventMapping)
  // Singleton default instance: the canonical "all fields unset" EventMapping,
  // returned by getDefaultInstance() and getDefaultInstanceForType().
  private static final com.google.analytics.admin.v1alpha.EventMapping DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.analytics.admin.v1alpha.EventMapping();
  }

  /** Returns the shared immutable default (empty) instance of this message. */
  public static com.google.analytics.admin.v1alpha.EventMapping getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Wire-format parser. Parsing delegates to a fresh Builder; on any failure the
  // partially-populated message is attached to the thrown exception so callers
  // can inspect whatever was decoded before the error occurred.
  private static final com.google.protobuf.Parser<EventMapping> PARSER =
      new com.google.protobuf.AbstractParser<EventMapping>() {
        @java.lang.Override
        public EventMapping parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // I/O problems are surfaced as protocol errors, preserving the cause.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  /** Returns the static parser for EventMapping messages. */
  public static com.google.protobuf.Parser<EventMapping> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<EventMapping> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.analytics.admin.v1alpha.EventMapping getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/visionai/v1/platform.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.visionai.v1;
/**
*
*
* <pre>
* Message for requesting list of Drafts.
* </pre>
*
* Protobuf type {@code google.cloud.visionai.v1.ListDraftsRequest}
*/
public final class ListDraftsRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.visionai.v1.ListDraftsRequest)
ListDraftsRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListDraftsRequest.newBuilder() to construct.
private ListDraftsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListDraftsRequest() {
parent_ = "";
pageToken_ = "";
filter_ = "";
orderBy_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListDraftsRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.visionai.v1.PlatformProto
.internal_static_google_cloud_visionai_v1_ListDraftsRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.visionai.v1.PlatformProto
.internal_static_google_cloud_visionai_v1_ListDraftsRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.visionai.v1.ListDraftsRequest.class,
com.google.cloud.visionai.v1.ListDraftsRequest.Builder.class);
}
public static final int PARENT_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. Parent value for ListDraftsRequest.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
@java.lang.Override
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. Parent value for ListDraftsRequest.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int PAGE_SIZE_FIELD_NUMBER = 2;
private int pageSize_ = 0;
/**
*
*
* <pre>
* Requested page size. Server may return fewer items than requested.
* If unspecified, server will pick an appropriate default.
* </pre>
*
* <code>int32 page_size = 2;</code>
*
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
public static final int PAGE_TOKEN_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private volatile java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* A token identifying a page of results the server should return.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return The pageToken.
*/
@java.lang.Override
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* A token identifying a page of results the server should return.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return The bytes for pageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int FILTER_FIELD_NUMBER = 4;
@SuppressWarnings("serial")
private volatile java.lang.Object filter_ = "";
/**
*
*
* <pre>
* Filtering results.
* </pre>
*
* <code>string filter = 4;</code>
*
* @return The filter.
*/
@java.lang.Override
public java.lang.String getFilter() {
java.lang.Object ref = filter_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
filter_ = s;
return s;
}
}
/**
*
*
* <pre>
* Filtering results.
* </pre>
*
* <code>string filter = 4;</code>
*
* @return The bytes for filter.
*/
@java.lang.Override
public com.google.protobuf.ByteString getFilterBytes() {
java.lang.Object ref = filter_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
filter_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int ORDER_BY_FIELD_NUMBER = 5;
@SuppressWarnings("serial")
private volatile java.lang.Object orderBy_ = "";
/**
*
*
* <pre>
* Hint for how to order the results.
* </pre>
*
* <code>string order_by = 5;</code>
*
* @return The orderBy.
*/
@java.lang.Override
public java.lang.String getOrderBy() {
java.lang.Object ref = orderBy_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
orderBy_ = s;
return s;
}
}
/**
*
*
* <pre>
* Hint for how to order the results.
* </pre>
*
* <code>string order_by = 5;</code>
*
* @return The bytes for orderBy.
*/
@java.lang.Override
public com.google.protobuf.ByteString getOrderByBytes() {
java.lang.Object ref = orderBy_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
orderBy_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
  // Memoized initialization state: -1 = not yet computed, 0 = missing required
  // fields, 1 = fully initialized.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    // This message has no required fields, so it is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Proto3 wire semantics: fields at their default value (empty string, 0)
    // are omitted from the serialized output entirely.
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
    }
    if (pageSize_ != 0) {
      output.writeInt32(2, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 4, filter_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(orderBy_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 5, orderBy_);
    }
    // Round-trip any fields that were unknown at parse time.
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // Size computation mirrors writeTo() field-for-field and is memoized;
    // -1 means "not yet computed" (messages are immutable once built).
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
    }
    if (pageSize_ != 0) {
      size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, filter_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(orderBy_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, orderBy_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    // Non-ListDraftsRequest messages (including dynamic messages) are deferred
    // to the descriptor-based comparison in the superclass.
    if (!(obj instanceof com.google.cloud.visionai.v1.ListDraftsRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.visionai.v1.ListDraftsRequest other =
        (com.google.cloud.visionai.v1.ListDraftsRequest) obj;

    // Field-by-field comparison; unknown fields participate in equality too.
    if (!getParent().equals(other.getParent())) return false;
    if (getPageSize() != other.getPageSize()) return false;
    if (!getPageToken().equals(other.getPageToken())) return false;
    if (!getFilter().equals(other.getFilter())) return false;
    if (!getOrderBy().equals(other.getOrderBy())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    // Memoized; 0 doubles as the "not yet computed" sentinel (the mixing
    // constants below make an actual hash of 0 effectively impossible).
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    // Fold in each field tagged by its field number, consistent with equals().
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
    hash = (53 * hash) + getPageSize();
    hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getPageToken().hashCode();
    hash = (37 * hash) + FILTER_FIELD_NUMBER;
    hash = (53 * hash) + getFilter().hashCode();
    hash = (37 * hash) + ORDER_BY_FIELD_NUMBER;
    hash = (53 * hash) + getOrderBy().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
public static com.google.cloud.visionai.v1.ListDraftsRequest parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.visionai.v1.ListDraftsRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.visionai.v1.ListDraftsRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.visionai.v1.ListDraftsRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.visionai.v1.ListDraftsRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.visionai.v1.ListDraftsRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.visionai.v1.ListDraftsRequest parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.visionai.v1.ListDraftsRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.visionai.v1.ListDraftsRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.visionai.v1.ListDraftsRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.visionai.v1.ListDraftsRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.visionai.v1.ListDraftsRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.visionai.v1.ListDraftsRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Message for requesting list of Drafts.
* </pre>
*
* Protobuf type {@code google.cloud.visionai.v1.ListDraftsRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.visionai.v1.ListDraftsRequest)
com.google.cloud.visionai.v1.ListDraftsRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.visionai.v1.PlatformProto
.internal_static_google_cloud_visionai_v1_ListDraftsRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.visionai.v1.PlatformProto
.internal_static_google_cloud_visionai_v1_ListDraftsRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.visionai.v1.ListDraftsRequest.class,
com.google.cloud.visionai.v1.ListDraftsRequest.Builder.class);
}
// Construct using com.google.cloud.visionai.v1.ListDraftsRequest.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
    @java.lang.Override
    public Builder clear() {
      super.clear();
      // Reset the presence bitmask and every field to its proto3 default.
      bitField0_ = 0;
      parent_ = "";
      pageSize_ = 0;
      pageToken_ = "";
      filter_ = "";
      orderBy_ = "";
      return this;
    }
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.visionai.v1.PlatformProto
.internal_static_google_cloud_visionai_v1_ListDraftsRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.visionai.v1.ListDraftsRequest getDefaultInstanceForType() {
return com.google.cloud.visionai.v1.ListDraftsRequest.getDefaultInstance();
}
    @java.lang.Override
    public com.google.cloud.visionai.v1.ListDraftsRequest build() {
      com.google.cloud.visionai.v1.ListDraftsRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.visionai.v1.ListDraftsRequest buildPartial() {
      com.google.cloud.visionai.v1.ListDraftsRequest result =
          new com.google.cloud.visionai.v1.ListDraftsRequest(this);
      // Copy fields only when at least one presence bit is set.
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies each field from the builder into the message, gated by the
    // per-field presence bits (0x1=parent, 0x2=pageSize, 0x4=pageToken,
    // 0x8=filter, 0x10=orderBy).
    private void buildPartial0(com.google.cloud.visionai.v1.ListDraftsRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.parent_ = parent_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.pageSize_ = pageSize_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.pageToken_ = pageToken_;
      }
      if (((from_bitField0_ & 0x00000008) != 0)) {
        result.filter_ = filter_;
      }
      if (((from_bitField0_ & 0x00000010) != 0)) {
        result.orderBy_ = orderBy_;
      }
    }
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.visionai.v1.ListDraftsRequest) {
return mergeFrom((com.google.cloud.visionai.v1.ListDraftsRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
    // Merges another ListDraftsRequest into this builder. Proto3 merge
    // semantics: only fields set to a non-default value in `other` overwrite
    // this builder's fields; defaults in `other` leave existing values intact.
    public Builder mergeFrom(com.google.cloud.visionai.v1.ListDraftsRequest other) {
      if (other == com.google.cloud.visionai.v1.ListDraftsRequest.getDefaultInstance()) return this;
      if (!other.getParent().isEmpty()) {
        parent_ = other.parent_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (other.getPageSize() != 0) {
        setPageSize(other.getPageSize());
      }
      if (!other.getPageToken().isEmpty()) {
        pageToken_ = other.pageToken_;
        bitField0_ |= 0x00000004;
        onChanged();
      }
      if (!other.getFilter().isEmpty()) {
        filter_ = other.filter_;
        bitField0_ |= 0x00000008;
        onChanged();
      }
      if (!other.getOrderBy().isEmpty()) {
        orderBy_ = other.orderBy_;
        bitField0_ |= 0x00000010;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      // No required fields, so the builder is always initialized.
      return true;
    }
    // Parses fields from the wire and merges them into this builder.
    // Dispatches on each tag (field number << 3 | wire type); unrecognized
    // tags are preserved as unknown fields rather than dropped.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // Tag 0 marks end of input.
              done = true;
              break;
            case 10:
              {
                parent_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 16:
              {
                pageSize_ = input.readInt32();
                bitField0_ |= 0x00000002;
                break;
              } // case 16
            case 26:
              {
                pageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            case 34:
              {
                filter_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000008;
                break;
              } // case 34
            case 42:
              {
                orderBy_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000010;
                break;
              } // case 42
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        // Notify parent builders even if parsing failed partway through.
        onChanged();
      } // finally
      return this;
    }
private int bitField0_;
private java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. Parent value for ListDraftsRequest.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. Parent value for ListDraftsRequest.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. Parent value for ListDraftsRequest.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The parent to set.
* @return This builder for chaining.
*/
public Builder setParent(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Parent value for ListDraftsRequest.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
// NOTE(review): generated protobuf code ("DO NOT EDIT" file header) — code kept
// byte-identical; only comments added. Field presence in this Builder is tracked
// via bits in bitField0_: parent=0x1, pageSize=0x2, pageToken=0x4, filter=0x8,
// orderBy=0x10. Every mutator sets/clears its bit and calls onChanged() so that
// parent builders and buildPartial() see the modification.
public Builder clearParent() {
  // Reset to the default instance's value and drop the presence bit.
  parent_ = getDefaultInstance().getParent();
  bitField0_ = (bitField0_ & ~0x00000001);
  onChanged();
  return this;
}

/**
 *
 *
 * <pre>
 * Required. Parent value for ListDraftsRequest.
 * </pre>
 *
 * <code>
 * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @param value The bytes for parent to set.
 * @return This builder for chaining.
 */
public Builder setParentBytes(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  // proto3 requires string fields to be valid UTF-8; rejects bad input eagerly.
  checkByteStringIsUtf8(value);
  parent_ = value;
  bitField0_ |= 0x00000001;
  onChanged();
  return this;
}

// Backing storage for page_size (field 2); 0 means "unset / server default".
private int pageSize_;

/**
 *
 *
 * <pre>
 * Requested page size. Server may return fewer items than requested.
 * If unspecified, server will pick an appropriate default.
 * </pre>
 *
 * <code>int32 page_size = 2;</code>
 *
 * @return The pageSize.
 */
@java.lang.Override
public int getPageSize() {
  return pageSize_;
}

/**
 *
 *
 * <pre>
 * Requested page size. Server may return fewer items than requested.
 * If unspecified, server will pick an appropriate default.
 * </pre>
 *
 * <code>int32 page_size = 2;</code>
 *
 * @param value The pageSize to set.
 * @return This builder for chaining.
 */
public Builder setPageSize(int value) {
  pageSize_ = value;
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}

/**
 *
 *
 * <pre>
 * Requested page size. Server may return fewer items than requested.
 * If unspecified, server will pick an appropriate default.
 * </pre>
 *
 * <code>int32 page_size = 2;</code>
 *
 * @return This builder for chaining.
 */
public Builder clearPageSize() {
  bitField0_ = (bitField0_ & ~0x00000002);
  pageSize_ = 0;
  onChanged();
  return this;
}

// Backing storage for page_token (field 3). Held as Object so it can be either
// a String or a ByteString; getters lazily convert and cache the String form.
private java.lang.Object pageToken_ = "";

/**
 *
 *
 * <pre>
 * A token identifying a page of results the server should return.
 * </pre>
 *
 * <code>string page_token = 3;</code>
 *
 * @return The pageToken.
 */
public java.lang.String getPageToken() {
  java.lang.Object ref = pageToken_;
  if (!(ref instanceof java.lang.String)) {
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    // Cache the decoded String so subsequent calls skip the UTF-8 decode.
    pageToken_ = s;
    return s;
  } else {
    return (java.lang.String) ref;
  }
}

/**
 *
 *
 * <pre>
 * A token identifying a page of results the server should return.
 * </pre>
 *
 * <code>string page_token = 3;</code>
 *
 * @return The bytes for pageToken.
 */
public com.google.protobuf.ByteString getPageTokenBytes() {
  java.lang.Object ref = pageToken_;
  if (ref instanceof String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    // Cache the encoded ByteString form.
    pageToken_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}

/**
 *
 *
 * <pre>
 * A token identifying a page of results the server should return.
 * </pre>
 *
 * <code>string page_token = 3;</code>
 *
 * @param value The pageToken to set.
 * @return This builder for chaining.
 */
public Builder setPageToken(java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  pageToken_ = value;
  bitField0_ |= 0x00000004;
  onChanged();
  return this;
}

/**
 *
 *
 * <pre>
 * A token identifying a page of results the server should return.
 * </pre>
 *
 * <code>string page_token = 3;</code>
 *
 * @return This builder for chaining.
 */
public Builder clearPageToken() {
  pageToken_ = getDefaultInstance().getPageToken();
  bitField0_ = (bitField0_ & ~0x00000004);
  onChanged();
  return this;
}

/**
 *
 *
 * <pre>
 * A token identifying a page of results the server should return.
 * </pre>
 *
 * <code>string page_token = 3;</code>
 *
 * @param value The bytes for pageToken to set.
 * @return This builder for chaining.
 */
public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  checkByteStringIsUtf8(value);
  pageToken_ = value;
  bitField0_ |= 0x00000004;
  onChanged();
  return this;
}
// NOTE(review): generated accessor cluster for the `filter` (field 4) and
// `order_by` (field 5) string fields — code byte-identical, comments added.
// Same lazy String/ByteString caching pattern as the other string fields.
private java.lang.Object filter_ = "";

/**
 *
 *
 * <pre>
 * Filtering results.
 * </pre>
 *
 * <code>string filter = 4;</code>
 *
 * @return The filter.
 */
public java.lang.String getFilter() {
  java.lang.Object ref = filter_;
  if (!(ref instanceof java.lang.String)) {
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    filter_ = s;
    return s;
  } else {
    return (java.lang.String) ref;
  }
}

/**
 *
 *
 * <pre>
 * Filtering results.
 * </pre>
 *
 * <code>string filter = 4;</code>
 *
 * @return The bytes for filter.
 */
public com.google.protobuf.ByteString getFilterBytes() {
  java.lang.Object ref = filter_;
  if (ref instanceof String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    filter_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}

/**
 *
 *
 * <pre>
 * Filtering results.
 * </pre>
 *
 * <code>string filter = 4;</code>
 *
 * @param value The filter to set.
 * @return This builder for chaining.
 */
public Builder setFilter(java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  filter_ = value;
  bitField0_ |= 0x00000008;
  onChanged();
  return this;
}

/**
 *
 *
 * <pre>
 * Filtering results.
 * </pre>
 *
 * <code>string filter = 4;</code>
 *
 * @return This builder for chaining.
 */
public Builder clearFilter() {
  filter_ = getDefaultInstance().getFilter();
  bitField0_ = (bitField0_ & ~0x00000008);
  onChanged();
  return this;
}

/**
 *
 *
 * <pre>
 * Filtering results.
 * </pre>
 *
 * <code>string filter = 4;</code>
 *
 * @param value The bytes for filter to set.
 * @return This builder for chaining.
 */
public Builder setFilterBytes(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  checkByteStringIsUtf8(value);
  filter_ = value;
  bitField0_ |= 0x00000008;
  onChanged();
  return this;
}

// Backing storage for order_by (field 5).
private java.lang.Object orderBy_ = "";

/**
 *
 *
 * <pre>
 * Hint for how to order the results.
 * </pre>
 *
 * <code>string order_by = 5;</code>
 *
 * @return The orderBy.
 */
public java.lang.String getOrderBy() {
  java.lang.Object ref = orderBy_;
  if (!(ref instanceof java.lang.String)) {
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    orderBy_ = s;
    return s;
  } else {
    return (java.lang.String) ref;
  }
}

/**
 *
 *
 * <pre>
 * Hint for how to order the results.
 * </pre>
 *
 * <code>string order_by = 5;</code>
 *
 * @return The bytes for orderBy.
 */
public com.google.protobuf.ByteString getOrderByBytes() {
  java.lang.Object ref = orderBy_;
  if (ref instanceof String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    orderBy_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}

/**
 *
 *
 * <pre>
 * Hint for how to order the results.
 * </pre>
 *
 * <code>string order_by = 5;</code>
 *
 * @param value The orderBy to set.
 * @return This builder for chaining.
 */
public Builder setOrderBy(java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  orderBy_ = value;
  bitField0_ |= 0x00000010;
  onChanged();
  return this;
}

/**
 *
 *
 * <pre>
 * Hint for how to order the results.
 * </pre>
 *
 * <code>string order_by = 5;</code>
 *
 * @return This builder for chaining.
 */
public Builder clearOrderBy() {
  orderBy_ = getDefaultInstance().getOrderBy();
  bitField0_ = (bitField0_ & ~0x00000010);
  onChanged();
  return this;
}

/**
 *
 *
 * <pre>
 * Hint for how to order the results.
 * </pre>
 *
 * <code>string order_by = 5;</code>
 *
 * @param value The bytes for orderBy to set.
 * @return This builder for chaining.
 */
public Builder setOrderByBytes(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  checkByteStringIsUtf8(value);
  orderBy_ = value;
  bitField0_ |= 0x00000010;
  onChanged();
  return this;
}
// NOTE(review): generated tail of ListDraftsRequest — unknown-field passthroughs,
// the singleton default instance, and the PARSER used by all parseFrom overloads.
// Code byte-identical; comments only.
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.setUnknownFields(unknownFields);
}

@java.lang.Override
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.mergeUnknownFields(unknownFields);
}

// @@protoc_insertion_point(builder_scope:google.cloud.visionai.v1.ListDraftsRequest)
}

// @@protoc_insertion_point(class_scope:google.cloud.visionai.v1.ListDraftsRequest)
// Shared immutable default instance; returned by getDefaultInstance() and used
// as the base for newBuilder().
private static final com.google.cloud.visionai.v1.ListDraftsRequest DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.cloud.visionai.v1.ListDraftsRequest();
}

public static com.google.cloud.visionai.v1.ListDraftsRequest getDefaultInstance() {
  return DEFAULT_INSTANCE;
}

private static final com.google.protobuf.Parser<ListDraftsRequest> PARSER =
    new com.google.protobuf.AbstractParser<ListDraftsRequest>() {
      @java.lang.Override
      public ListDraftsRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          // Attach whatever was parsed so far so callers can inspect partial data.
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

public static com.google.protobuf.Parser<ListDraftsRequest> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<ListDraftsRequest> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.cloud.visionai.v1.ListDraftsRequest getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
// ---------------------------------------------------------------------------
// NOTE(review): file seam. Everything below belongs to a DIFFERENT generated
// source (com.google.cloud.bigquery.migration.v2alpha.TranslateQueryRequest).
// Two generated files appear to have been accidentally concatenated here and
// should be split back into separate .java files.
// ---------------------------------------------------------------------------
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/bigquery/migration/v2alpha/translation_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.bigquery.migration.v2alpha;
/**
*
*
* <pre>
* The request of translating a SQL query to Standard SQL.
* </pre>
*
* Protobuf type {@code google.cloud.bigquery.migration.v2alpha.TranslateQueryRequest}
*/
public final class TranslateQueryRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.bigquery.migration.v2alpha.TranslateQueryRequest)
TranslateQueryRequestOrBuilder {
// NOTE(review): generated protobuf code — byte-identical, comments only.
private static final long serialVersionUID = 0L;

// Use TranslateQueryRequest.newBuilder() to construct.
private TranslateQueryRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}

// Default constructor initializes all fields to proto3 defaults.
private TranslateQueryRequest() {
  parent_ = "";
  sourceDialect_ = 0;
  query_ = "";
}

@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  return new TranslateQueryRequest();
}

// Descriptor lookups delegate to the file-level TranslationServiceProto holder.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.bigquery.migration.v2alpha.TranslationServiceProto
      .internal_static_google_cloud_bigquery_migration_v2alpha_TranslateQueryRequest_descriptor;
}

@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.cloud.bigquery.migration.v2alpha.TranslationServiceProto
      .internal_static_google_cloud_bigquery_migration_v2alpha_TranslateQueryRequest_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.bigquery.migration.v2alpha.TranslateQueryRequest.class,
          com.google.cloud.bigquery.migration.v2alpha.TranslateQueryRequest.Builder.class);
}
/**
 *
 *
 * <pre>
 * Supported SQL translation source dialects.
 * </pre>
 *
 * Protobuf enum {@code
 * google.cloud.bigquery.migration.v2alpha.TranslateQueryRequest.SqlTranslationSourceDialect}
 */
// NOTE(review): generated proto enum — UNRECOGNIZED(-1) represents wire values
// with no known mapping; getNumber()/getValueDescriptor() intentionally throw
// for it. Code byte-identical; comments only.
public enum SqlTranslationSourceDialect implements com.google.protobuf.ProtocolMessageEnum {
  /**
   *
   *
   * <pre>
   * SqlTranslationSourceDialect not specified.
   * </pre>
   *
   * <code>SQL_TRANSLATION_SOURCE_DIALECT_UNSPECIFIED = 0;</code>
   */
  SQL_TRANSLATION_SOURCE_DIALECT_UNSPECIFIED(0),
  /**
   *
   *
   * <pre>
   * Teradata SQL.
   * </pre>
   *
   * <code>TERADATA = 1;</code>
   */
  TERADATA(1),
  UNRECOGNIZED(-1),
  ;

  /**
   *
   *
   * <pre>
   * SqlTranslationSourceDialect not specified.
   * </pre>
   *
   * <code>SQL_TRANSLATION_SOURCE_DIALECT_UNSPECIFIED = 0;</code>
   */
  public static final int SQL_TRANSLATION_SOURCE_DIALECT_UNSPECIFIED_VALUE = 0;

  /**
   *
   *
   * <pre>
   * Teradata SQL.
   * </pre>
   *
   * <code>TERADATA = 1;</code>
   */
  public static final int TERADATA_VALUE = 1;

  public final int getNumber() {
    if (this == UNRECOGNIZED) {
      throw new java.lang.IllegalArgumentException(
          "Can't get the number of an unknown enum value.");
    }
    return value;
  }

  /**
   * @param value The numeric wire value of the corresponding enum entry.
   * @return The enum associated with the given numeric wire value.
   * @deprecated Use {@link #forNumber(int)} instead.
   */
  @java.lang.Deprecated
  public static SqlTranslationSourceDialect valueOf(int value) {
    return forNumber(value);
  }

  /**
   * @param value The numeric wire value of the corresponding enum entry.
   * @return The enum associated with the given numeric wire value.
   */
  public static SqlTranslationSourceDialect forNumber(int value) {
    switch (value) {
      case 0:
        return SQL_TRANSLATION_SOURCE_DIALECT_UNSPECIFIED;
      case 1:
        return TERADATA;
      default:
        // Unknown wire value — caller maps this to UNRECOGNIZED.
        return null;
    }
  }

  public static com.google.protobuf.Internal.EnumLiteMap<SqlTranslationSourceDialect>
      internalGetValueMap() {
    return internalValueMap;
  }

  private static final com.google.protobuf.Internal.EnumLiteMap<SqlTranslationSourceDialect>
      internalValueMap =
          new com.google.protobuf.Internal.EnumLiteMap<SqlTranslationSourceDialect>() {
            public SqlTranslationSourceDialect findValueByNumber(int number) {
              return SqlTranslationSourceDialect.forNumber(number);
            }
          };

  public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() {
    if (this == UNRECOGNIZED) {
      throw new java.lang.IllegalStateException(
          "Can't get the descriptor of an unrecognized enum value.");
    }
    return getDescriptor().getValues().get(ordinal());
  }

  public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() {
    return getDescriptor();
  }

  public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() {
    return com.google.cloud.bigquery.migration.v2alpha.TranslateQueryRequest.getDescriptor()
        .getEnumTypes()
        .get(0);
  }

  private static final SqlTranslationSourceDialect[] VALUES = values();

  public static SqlTranslationSourceDialect valueOf(
      com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
    if (desc.getType() != getDescriptor()) {
      throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
    }
    if (desc.getIndex() == -1) {
      return UNRECOGNIZED;
    }
    return VALUES[desc.getIndex()];
  }

  private final int value;

  private SqlTranslationSourceDialect(int value) {
    this.value = value;
  }

  // @@protoc_insertion_point(enum_scope:google.cloud.bigquery.migration.v2alpha.TranslateQueryRequest.SqlTranslationSourceDialect)
}
// NOTE(review): generated message-side field storage and getters for
// parent (1), source_dialect (2), query (3). Byte-identical; comments only.
public static final int PARENT_FIELD_NUMBER = 1;

@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";

/**
 *
 *
 * <pre>
 * Required. The name of the project to which this translation request belongs.
 * Example: `projects/foo/locations/bar`
 * </pre>
 *
 * <code>
 * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return The parent.
 */
@java.lang.Override
public java.lang.String getParent() {
  java.lang.Object ref = parent_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    // Cache decoded String form (field is volatile for safe publication).
    parent_ = s;
    return s;
  }
}

/**
 *
 *
 * <pre>
 * Required. The name of the project to which this translation request belongs.
 * Example: `projects/foo/locations/bar`
 * </pre>
 *
 * <code>
 * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return The bytes for parent.
 */
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
  java.lang.Object ref = parent_;
  if (ref instanceof java.lang.String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    parent_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}

public static final int SOURCE_DIALECT_FIELD_NUMBER = 2;

// Stored as the raw wire number so unknown enum values survive round-trips.
private int sourceDialect_ = 0;

/**
 *
 *
 * <pre>
 * Required. The source SQL dialect of `queries`.
 * </pre>
 *
 * <code>
 * .google.cloud.bigquery.migration.v2alpha.TranslateQueryRequest.SqlTranslationSourceDialect source_dialect = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 *
 * @return The enum numeric value on the wire for sourceDialect.
 */
@java.lang.Override
public int getSourceDialectValue() {
  return sourceDialect_;
}

/**
 *
 *
 * <pre>
 * Required. The source SQL dialect of `queries`.
 * </pre>
 *
 * <code>
 * .google.cloud.bigquery.migration.v2alpha.TranslateQueryRequest.SqlTranslationSourceDialect source_dialect = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 *
 * @return The sourceDialect.
 */
@java.lang.Override
public com.google.cloud.bigquery.migration.v2alpha.TranslateQueryRequest
    .SqlTranslationSourceDialect
    getSourceDialect() {
  com.google.cloud.bigquery.migration.v2alpha.TranslateQueryRequest.SqlTranslationSourceDialect
      result =
          com.google.cloud.bigquery.migration.v2alpha.TranslateQueryRequest
              .SqlTranslationSourceDialect.forNumber(sourceDialect_);
  // forNumber returns null for unknown wire values → surface as UNRECOGNIZED.
  return result == null
      ? com.google.cloud.bigquery.migration.v2alpha.TranslateQueryRequest
          .SqlTranslationSourceDialect.UNRECOGNIZED
      : result;
}

public static final int QUERY_FIELD_NUMBER = 3;

@SuppressWarnings("serial")
private volatile java.lang.Object query_ = "";

/**
 *
 *
 * <pre>
 * Required. The query to be translated.
 * </pre>
 *
 * <code>string query = 3 [(.google.api.field_behavior) = REQUIRED];</code>
 *
 * @return The query.
 */
@java.lang.Override
public java.lang.String getQuery() {
  java.lang.Object ref = query_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    query_ = s;
    return s;
  }
}

/**
 *
 *
 * <pre>
 * Required. The query to be translated.
 * </pre>
 *
 * <code>string query = 3 [(.google.api.field_behavior) = REQUIRED];</code>
 *
 * @return The bytes for query.
 */
@java.lang.Override
public com.google.protobuf.ByteString getQueryBytes() {
  java.lang.Object ref = query_;
  if (ref instanceof java.lang.String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    query_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
// NOTE(review): generated serialization/equality members. proto3 default values
// (empty string, enum 0) are skipped on the wire. Byte-identical; comments only.
// -1 = not computed, 0 = not initialized, 1 = initialized (memoized).
private byte memoizedIsInitialized = -1;

@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;

  memoizedIsInitialized = 1;
  return true;
}

@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
  }
  if (sourceDialect_
      != com.google.cloud.bigquery.migration.v2alpha.TranslateQueryRequest
          .SqlTranslationSourceDialect.SQL_TRANSLATION_SOURCE_DIALECT_UNSPECIFIED
          .getNumber()) {
    output.writeEnum(2, sourceDialect_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(query_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 3, query_);
  }
  getUnknownFields().writeTo(output);
}

@java.lang.Override
public int getSerializedSize() {
  // Memoized; -1 means "not yet computed". Must mirror writeTo exactly.
  int size = memoizedSize;
  if (size != -1) return size;

  size = 0;
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
  }
  if (sourceDialect_
      != com.google.cloud.bigquery.migration.v2alpha.TranslateQueryRequest
          .SqlTranslationSourceDialect.SQL_TRANSLATION_SOURCE_DIALECT_UNSPECIFIED
          .getNumber()) {
    size += com.google.protobuf.CodedOutputStream.computeEnumSize(2, sourceDialect_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(query_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, query_);
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}

@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.cloud.bigquery.migration.v2alpha.TranslateQueryRequest)) {
    return super.equals(obj);
  }
  com.google.cloud.bigquery.migration.v2alpha.TranslateQueryRequest other =
      (com.google.cloud.bigquery.migration.v2alpha.TranslateQueryRequest) obj;

  if (!getParent().equals(other.getParent())) return false;
  if (sourceDialect_ != other.sourceDialect_) return false;
  if (!getQuery().equals(other.getQuery())) return false;
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}

@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  hash = (37 * hash) + PARENT_FIELD_NUMBER;
  hash = (53 * hash) + getParent().hashCode();
  hash = (37 * hash) + SOURCE_DIALECT_FIELD_NUMBER;
  hash = (53 * hash) + sourceDialect_;
  hash = (37 * hash) + QUERY_FIELD_NUMBER;
  hash = (53 * hash) + getQuery().hashCode();
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
// NOTE(review): generated static parse/builder factory boilerplate; all
// overloads delegate to PARSER or GeneratedMessageV3 helpers. Byte-identical.
public static com.google.cloud.bigquery.migration.v2alpha.TranslateQueryRequest parseFrom(
    java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.bigquery.migration.v2alpha.TranslateQueryRequest parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.bigquery.migration.v2alpha.TranslateQueryRequest parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.bigquery.migration.v2alpha.TranslateQueryRequest parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.bigquery.migration.v2alpha.TranslateQueryRequest parseFrom(
    byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.bigquery.migration.v2alpha.TranslateQueryRequest parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.bigquery.migration.v2alpha.TranslateQueryRequest parseFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.bigquery.migration.v2alpha.TranslateQueryRequest parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}

public static com.google.cloud.bigquery.migration.v2alpha.TranslateQueryRequest
    parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}

public static com.google.cloud.bigquery.migration.v2alpha.TranslateQueryRequest
    parseDelimitedFrom(
        java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}

public static com.google.cloud.bigquery.migration.v2alpha.TranslateQueryRequest parseFrom(
    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.bigquery.migration.v2alpha.TranslateQueryRequest parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}

@java.lang.Override
public Builder newBuilderForType() {
  return newBuilder();
}

public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}

public static Builder newBuilder(
    com.google.cloud.bigquery.migration.v2alpha.TranslateQueryRequest prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}

@java.lang.Override
public Builder toBuilder() {
  // The default instance yields a fresh empty Builder; otherwise seed from this.
  return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}

@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
*
*
* <pre>
* The request of translating a SQL query to Standard SQL.
* </pre>
*
* Protobuf type {@code google.cloud.bigquery.migration.v2alpha.TranslateQueryRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.bigquery.migration.v2alpha.TranslateQueryRequest)
com.google.cloud.bigquery.migration.v2alpha.TranslateQueryRequestOrBuilder {
// NOTE(review): generated Builder lifecycle members. buildPartial0 copies only
// fields whose presence bit is set in bitField0_ (parent=0x1, sourceDialect=0x2,
// query=0x4). Byte-identical; comments only.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.bigquery.migration.v2alpha.TranslationServiceProto
      .internal_static_google_cloud_bigquery_migration_v2alpha_TranslateQueryRequest_descriptor;
}

@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.cloud.bigquery.migration.v2alpha.TranslationServiceProto
      .internal_static_google_cloud_bigquery_migration_v2alpha_TranslateQueryRequest_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.bigquery.migration.v2alpha.TranslateQueryRequest.class,
          com.google.cloud.bigquery.migration.v2alpha.TranslateQueryRequest.Builder.class);
}

// Construct using
// com.google.cloud.bigquery.migration.v2alpha.TranslateQueryRequest.newBuilder()
private Builder() {}

private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  super(parent);
}

@java.lang.Override
public Builder clear() {
  super.clear();
  bitField0_ = 0;
  parent_ = "";
  sourceDialect_ = 0;
  query_ = "";
  return this;
}

@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
  return com.google.cloud.bigquery.migration.v2alpha.TranslationServiceProto
      .internal_static_google_cloud_bigquery_migration_v2alpha_TranslateQueryRequest_descriptor;
}

@java.lang.Override
public com.google.cloud.bigquery.migration.v2alpha.TranslateQueryRequest
    getDefaultInstanceForType() {
  return com.google.cloud.bigquery.migration.v2alpha.TranslateQueryRequest.getDefaultInstance();
}

@java.lang.Override
public com.google.cloud.bigquery.migration.v2alpha.TranslateQueryRequest build() {
  com.google.cloud.bigquery.migration.v2alpha.TranslateQueryRequest result = buildPartial();
  if (!result.isInitialized()) {
    throw newUninitializedMessageException(result);
  }
  return result;
}

@java.lang.Override
public com.google.cloud.bigquery.migration.v2alpha.TranslateQueryRequest buildPartial() {
  com.google.cloud.bigquery.migration.v2alpha.TranslateQueryRequest result =
      new com.google.cloud.bigquery.migration.v2alpha.TranslateQueryRequest(this);
  if (bitField0_ != 0) {
    buildPartial0(result);
  }
  onBuilt();
  return result;
}

private void buildPartial0(
    com.google.cloud.bigquery.migration.v2alpha.TranslateQueryRequest result) {
  int from_bitField0_ = bitField0_;
  if (((from_bitField0_ & 0x00000001) != 0)) {
    result.parent_ = parent_;
  }
  if (((from_bitField0_ & 0x00000002) != 0)) {
    result.sourceDialect_ = sourceDialect_;
  }
  if (((from_bitField0_ & 0x00000004) != 0)) {
    result.query_ = query_;
  }
}
// NOTE(review): generated reflection passthroughs and merge logic. The wire-
// format mergeFrom switches on tag = (field_number << 3) | wire_type:
// 10 = field 1 LEN, 16 = field 2 VARINT, 26 = field 3 LEN. Byte-identical.
@java.lang.Override
public Builder clone() {
  return super.clone();
}

@java.lang.Override
public Builder setField(
    com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
  return super.setField(field, value);
}

@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
  return super.clearField(field);
}

@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
  return super.clearOneof(oneof);
}

@java.lang.Override
public Builder setRepeatedField(
    com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
  return super.setRepeatedField(field, index, value);
}

@java.lang.Override
public Builder addRepeatedField(
    com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
  return super.addRepeatedField(field, value);
}

@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
  if (other instanceof com.google.cloud.bigquery.migration.v2alpha.TranslateQueryRequest) {
    return mergeFrom((com.google.cloud.bigquery.migration.v2alpha.TranslateQueryRequest) other);
  } else {
    super.mergeFrom(other);
    return this;
  }
}

public Builder mergeFrom(
    com.google.cloud.bigquery.migration.v2alpha.TranslateQueryRequest other) {
  if (other
      == com.google.cloud.bigquery.migration.v2alpha.TranslateQueryRequest.getDefaultInstance())
    return this;
  // proto3 merge semantics: only non-default values from `other` overwrite.
  if (!other.getParent().isEmpty()) {
    parent_ = other.parent_;
    bitField0_ |= 0x00000001;
    onChanged();
  }
  if (other.sourceDialect_ != 0) {
    setSourceDialectValue(other.getSourceDialectValue());
  }
  if (!other.getQuery().isEmpty()) {
    query_ = other.query_;
    bitField0_ |= 0x00000004;
    onChanged();
  }
  this.mergeUnknownFields(other.getUnknownFields());
  onChanged();
  return this;
}

@java.lang.Override
public final boolean isInitialized() {
  return true;
}

@java.lang.Override
public Builder mergeFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0:
          done = true;
          break;
        case 10:
          {
            parent_ = input.readStringRequireUtf8();
            bitField0_ |= 0x00000001;
            break;
          } // case 10
        case 16:
          {
            sourceDialect_ = input.readEnum();
            bitField0_ |= 0x00000002;
            break;
          } // case 16
        case 26:
          {
            query_ = input.readStringRequireUtf8();
            bitField0_ |= 0x00000004;
            break;
          } // case 26
        default:
          {
            if (!super.parseUnknownField(input, extensionRegistry, tag)) {
              done = true; // was an endgroup tag
            }
            break;
          } // default:
      } // switch (tag)
    } // while (!done)
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.unwrapIOException();
  } finally {
    onChanged();
  } // finally
  return this;
}
private int bitField0_;
private java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The name of the project to which this translation request belongs.
* Example: `projects/foo/locations/bar`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The name of the project to which this translation request belongs.
* Example: `projects/foo/locations/bar`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The name of the project to which this translation request belongs.
* Example: `projects/foo/locations/bar`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The parent to set.
* @return This builder for chaining.
*/
public Builder setParent(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The name of the project to which this translation request belongs.
* Example: `projects/foo/locations/bar`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearParent() {
parent_ = getDefaultInstance().getParent();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The name of the project to which this translation request belongs.
* Example: `projects/foo/locations/bar`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for parent to set.
* @return This builder for chaining.
*/
    public Builder setParentBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      // proto3 strings must be valid UTF-8; reject malformed bytes up front.
      checkByteStringIsUtf8(value);
      parent_ = value;
      // Mark field 1 (parent) as set so buildPartial() copies it through.
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
private int sourceDialect_ = 0;
/**
*
*
* <pre>
* Required. The source SQL dialect of `queries`.
* </pre>
*
* <code>
* .google.cloud.bigquery.migration.v2alpha.TranslateQueryRequest.SqlTranslationSourceDialect source_dialect = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The enum numeric value on the wire for sourceDialect.
*/
@java.lang.Override
public int getSourceDialectValue() {
return sourceDialect_;
}
/**
*
*
* <pre>
* Required. The source SQL dialect of `queries`.
* </pre>
*
* <code>
* .google.cloud.bigquery.migration.v2alpha.TranslateQueryRequest.SqlTranslationSourceDialect source_dialect = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @param value The enum numeric value on the wire for sourceDialect to set.
* @return This builder for chaining.
*/
public Builder setSourceDialectValue(int value) {
sourceDialect_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The source SQL dialect of `queries`.
* </pre>
*
* <code>
* .google.cloud.bigquery.migration.v2alpha.TranslateQueryRequest.SqlTranslationSourceDialect source_dialect = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The sourceDialect.
*/
@java.lang.Override
public com.google.cloud.bigquery.migration.v2alpha.TranslateQueryRequest
.SqlTranslationSourceDialect
getSourceDialect() {
com.google.cloud.bigquery.migration.v2alpha.TranslateQueryRequest.SqlTranslationSourceDialect
result =
com.google.cloud.bigquery.migration.v2alpha.TranslateQueryRequest
.SqlTranslationSourceDialect.forNumber(sourceDialect_);
return result == null
? com.google.cloud.bigquery.migration.v2alpha.TranslateQueryRequest
.SqlTranslationSourceDialect.UNRECOGNIZED
: result;
}
/**
*
*
* <pre>
* Required. The source SQL dialect of `queries`.
* </pre>
*
* <code>
* .google.cloud.bigquery.migration.v2alpha.TranslateQueryRequest.SqlTranslationSourceDialect source_dialect = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @param value The sourceDialect to set.
* @return This builder for chaining.
*/
public Builder setSourceDialect(
com.google.cloud.bigquery.migration.v2alpha.TranslateQueryRequest
.SqlTranslationSourceDialect
value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000002;
sourceDialect_ = value.getNumber();
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The source SQL dialect of `queries`.
* </pre>
*
* <code>
* .google.cloud.bigquery.migration.v2alpha.TranslateQueryRequest.SqlTranslationSourceDialect source_dialect = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return This builder for chaining.
*/
    public Builder clearSourceDialect() {
      // Drop the presence bit for field 2 and restore the default enum number.
      bitField0_ = (bitField0_ & ~0x00000002);
      sourceDialect_ = 0;
      onChanged();
      return this;
    }
private java.lang.Object query_ = "";
/**
*
*
* <pre>
* Required. The query to be translated.
* </pre>
*
* <code>string query = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The query.
*/
public java.lang.String getQuery() {
java.lang.Object ref = query_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
query_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The query to be translated.
* </pre>
*
* <code>string query = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for query.
*/
public com.google.protobuf.ByteString getQueryBytes() {
java.lang.Object ref = query_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
query_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The query to be translated.
* </pre>
*
* <code>string query = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The query to set.
* @return This builder for chaining.
*/
public Builder setQuery(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
query_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The query to be translated.
* </pre>
*
* <code>string query = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return This builder for chaining.
*/
public Builder clearQuery() {
query_ = getDefaultInstance().getQuery();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The query to be translated.
* </pre>
*
* <code>string query = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The bytes for query to set.
* @return This builder for chaining.
*/
public Builder setQueryBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
query_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.bigquery.migration.v2alpha.TranslateQueryRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.bigquery.migration.v2alpha.TranslateQueryRequest)
  // Singleton default instance: the canonical "empty" TranslateQueryRequest
  // returned by getDefaultInstance() and used as the merge/parse baseline.
  private static final com.google.cloud.bigquery.migration.v2alpha.TranslateQueryRequest
      DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.bigquery.migration.v2alpha.TranslateQueryRequest();
  }
  public static com.google.cloud.bigquery.migration.v2alpha.TranslateQueryRequest
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Stateless shared parser backing every parseFrom() entry point. It
  // delegates to Builder.mergeFrom and, on failure, attaches the partially
  // built message to the thrown InvalidProtocolBufferException so callers can
  // inspect whatever was decoded before the error.
  private static final com.google.protobuf.Parser<TranslateQueryRequest> PARSER =
      new com.google.protobuf.AbstractParser<TranslateQueryRequest>() {
        @java.lang.Override
        public TranslateQueryRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O failures so callers see a single exception type.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<TranslateQueryRequest> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<TranslateQueryRequest> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.bigquery.migration.v2alpha.TranslateQueryRequest
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
// ==== End of TranslateQueryRequest.java ====
// Source: googleapis/google-cloud-java — java-filestore/proto-google-cloud-filestore-v1beta1/src/main/java/com/google/cloud/filestore/v1beta1/CreateSnapshotRequest.java
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/filestore/v1beta1/cloud_filestore_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.filestore.v1beta1;
/**
*
*
* <pre>
* CreateSnapshotRequest creates a snapshot.
* </pre>
*
* Protobuf type {@code google.cloud.filestore.v1beta1.CreateSnapshotRequest}
*/
public final class CreateSnapshotRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.filestore.v1beta1.CreateSnapshotRequest)
CreateSnapshotRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use CreateSnapshotRequest.newBuilder() to construct.
private CreateSnapshotRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private CreateSnapshotRequest() {
parent_ = "";
snapshotId_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new CreateSnapshotRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.filestore.v1beta1.CloudFilestoreServiceProto
.internal_static_google_cloud_filestore_v1beta1_CreateSnapshotRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.filestore.v1beta1.CloudFilestoreServiceProto
.internal_static_google_cloud_filestore_v1beta1_CreateSnapshotRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.filestore.v1beta1.CreateSnapshotRequest.class,
com.google.cloud.filestore.v1beta1.CreateSnapshotRequest.Builder.class);
}
private int bitField0_;
public static final int PARENT_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The Filestore Instance to create the snapshots of, in the format
* `projects/{project_id}/locations/{location}/instances/{instance_id}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
@java.lang.Override
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. The Filestore Instance to create the snapshots of, in the format
* `projects/{project_id}/locations/{location}/instances/{instance_id}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int SNAPSHOT_ID_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object snapshotId_ = "";
/**
*
*
* <pre>
* Required. The ID to use for the snapshot.
* The ID must be unique within the specified instance.
*
* This value must start with a lowercase letter followed by up to 62
* lowercase letters, numbers, or hyphens, and cannot end with a hyphen.
* </pre>
*
* <code>string snapshot_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The snapshotId.
*/
@java.lang.Override
public java.lang.String getSnapshotId() {
java.lang.Object ref = snapshotId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
snapshotId_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. The ID to use for the snapshot.
* The ID must be unique within the specified instance.
*
* This value must start with a lowercase letter followed by up to 62
* lowercase letters, numbers, or hyphens, and cannot end with a hyphen.
* </pre>
*
* <code>string snapshot_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for snapshotId.
*/
@java.lang.Override
public com.google.protobuf.ByteString getSnapshotIdBytes() {
java.lang.Object ref = snapshotId_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
snapshotId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int SNAPSHOT_FIELD_NUMBER = 3;
private com.google.cloud.filestore.v1beta1.Snapshot snapshot_;
/**
*
*
* <pre>
* Required. A snapshot resource
* </pre>
*
* <code>
* .google.cloud.filestore.v1beta1.Snapshot snapshot = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the snapshot field is set.
*/
  @java.lang.Override
  public boolean hasSnapshot() {
    // Bit 0 of bitField0_ records explicit presence of the snapshot message.
    return ((bitField0_ & 0x00000001) != 0);
  }
/**
*
*
* <pre>
* Required. A snapshot resource
* </pre>
*
* <code>
* .google.cloud.filestore.v1beta1.Snapshot snapshot = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The snapshot.
*/
@java.lang.Override
public com.google.cloud.filestore.v1beta1.Snapshot getSnapshot() {
return snapshot_ == null
? com.google.cloud.filestore.v1beta1.Snapshot.getDefaultInstance()
: snapshot_;
}
/**
*
*
* <pre>
* Required. A snapshot resource
* </pre>
*
* <code>
* .google.cloud.filestore.v1beta1.Snapshot snapshot = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.cloud.filestore.v1beta1.SnapshotOrBuilder getSnapshotOrBuilder() {
return snapshot_ == null
? com.google.cloud.filestore.v1beta1.Snapshot.getDefaultInstance()
: snapshot_;
}
  // Memoized isInitialized() result: -1 = not yet computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // proto3 message with no required fields, so it is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Serialize fields in tag order, skipping string fields at their default
    // (empty) value per proto3 semantics.
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(snapshotId_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, snapshotId_);
    }
    // The message field is written only when explicitly set (presence bit 0).
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(3, getSnapshot());
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // memoizedSize caches the result; -1 is the "not yet computed" sentinel.
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    // Mirrors writeTo(): sum the size of each field that would be emitted.
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(snapshotId_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, snapshotId_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getSnapshot());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.filestore.v1beta1.CreateSnapshotRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.filestore.v1beta1.CreateSnapshotRequest other =
        (com.google.cloud.filestore.v1beta1.CreateSnapshotRequest) obj;
    // Field-by-field comparison; the message field is compared only when both
    // sides agree it is present. Unknown fields participate in equality too.
    if (!getParent().equals(other.getParent())) return false;
    if (!getSnapshotId().equals(other.getSnapshotId())) return false;
    if (hasSnapshot() != other.hasSnapshot()) return false;
    if (hasSnapshot()) {
      if (!getSnapshot().equals(other.getSnapshot())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    // Cached after first computation; 0 is the "not yet computed" sentinel.
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    // Standard generated mixing: seed with the descriptor, then fold in each
    // present field keyed by its field number. Must stay consistent with
    // equals(): fields that equals() skips (unset snapshot) are skipped here.
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    hash = (37 * hash) + SNAPSHOT_ID_FIELD_NUMBER;
    hash = (53 * hash) + getSnapshotId().hashCode();
    if (hasSnapshot()) {
      hash = (37 * hash) + SNAPSHOT_FIELD_NUMBER;
      hash = (53 * hash) + getSnapshot().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
public static com.google.cloud.filestore.v1beta1.CreateSnapshotRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.filestore.v1beta1.CreateSnapshotRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.filestore.v1beta1.CreateSnapshotRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.filestore.v1beta1.CreateSnapshotRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.filestore.v1beta1.CreateSnapshotRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.filestore.v1beta1.CreateSnapshotRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.filestore.v1beta1.CreateSnapshotRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.filestore.v1beta1.CreateSnapshotRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.filestore.v1beta1.CreateSnapshotRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.filestore.v1beta1.CreateSnapshotRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.filestore.v1beta1.CreateSnapshotRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.filestore.v1beta1.CreateSnapshotRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.filestore.v1beta1.CreateSnapshotRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* CreateSnapshotRequest creates a snapshot.
* </pre>
*
* Protobuf type {@code google.cloud.filestore.v1beta1.CreateSnapshotRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.filestore.v1beta1.CreateSnapshotRequest)
com.google.cloud.filestore.v1beta1.CreateSnapshotRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.filestore.v1beta1.CloudFilestoreServiceProto
.internal_static_google_cloud_filestore_v1beta1_CreateSnapshotRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.filestore.v1beta1.CloudFilestoreServiceProto
.internal_static_google_cloud_filestore_v1beta1_CreateSnapshotRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.filestore.v1beta1.CreateSnapshotRequest.class,
com.google.cloud.filestore.v1beta1.CreateSnapshotRequest.Builder.class);
}
// Construct using com.google.cloud.filestore.v1beta1.CreateSnapshotRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getSnapshotFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
parent_ = "";
snapshotId_ = "";
snapshot_ = null;
if (snapshotBuilder_ != null) {
snapshotBuilder_.dispose();
snapshotBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.filestore.v1beta1.CloudFilestoreServiceProto
.internal_static_google_cloud_filestore_v1beta1_CreateSnapshotRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.filestore.v1beta1.CreateSnapshotRequest getDefaultInstanceForType() {
return com.google.cloud.filestore.v1beta1.CreateSnapshotRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.filestore.v1beta1.CreateSnapshotRequest build() {
com.google.cloud.filestore.v1beta1.CreateSnapshotRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.filestore.v1beta1.CreateSnapshotRequest buildPartial() {
com.google.cloud.filestore.v1beta1.CreateSnapshotRequest result =
new com.google.cloud.filestore.v1beta1.CreateSnapshotRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
    private void buildPartial0(com.google.cloud.filestore.v1beta1.CreateSnapshotRequest result) {
      int from_bitField0_ = bitField0_;
      // Copy only the fields whose builder presence bits are set.
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.parent_ = parent_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.snapshotId_ = snapshotId_;
      }
      // The snapshot message maps from builder bit 0x4 to message bit 0x1.
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.snapshot_ = snapshotBuilder_ == null ? snapshot_ : snapshotBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      result.bitField0_ |= to_bitField0_;
    }
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.filestore.v1beta1.CreateSnapshotRequest) {
return mergeFrom((com.google.cloud.filestore.v1beta1.CreateSnapshotRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
    public Builder mergeFrom(com.google.cloud.filestore.v1beta1.CreateSnapshotRequest other) {
      if (other == com.google.cloud.filestore.v1beta1.CreateSnapshotRequest.getDefaultInstance())
        return this;
      // Standard proto merge semantics: non-empty scalar fields from `other`
      // overwrite ours; a present message field is merged recursively.
      if (!other.getParent().isEmpty()) {
        parent_ = other.parent_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (!other.getSnapshotId().isEmpty()) {
        snapshotId_ = other.snapshotId_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (other.hasSnapshot()) {
        mergeSnapshot(other.getSnapshot());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
@java.lang.Override
public final boolean isInitialized() {
return true;
}
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      // Wire-format parse loop: dispatch on each tag (field number << 3 | wire
      // type) until end of input or an end-group tag.
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // Tag 0 marks end of the input stream.
              done = true;
              break;
            case 10:
              {
                // Field 1 (parent): length-delimited UTF-8 string.
                parent_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                // Field 2 (snapshot_id): length-delimited UTF-8 string.
                snapshotId_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            case 26:
              {
                // Field 3 (snapshot): embedded message, merged into the
                // snapshot sub-builder.
                input.readMessage(getSnapshotFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            default:
              {
                // Unrecognized tags are preserved as unknown fields.
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        // Always notify parent builders, even when parsing failed partway.
        onChanged();
      } // finally
      return this;
    }
private int bitField0_;
private java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The Filestore Instance to create the snapshots of, in the format
* `projects/{project_id}/locations/{location}/instances/{instance_id}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The Filestore Instance to create the snapshots of, in the format
* `projects/{project_id}/locations/{location}/instances/{instance_id}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The Filestore Instance to create the snapshots of, in the format
* `projects/{project_id}/locations/{location}/instances/{instance_id}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The parent to set.
* @return This builder for chaining.
*/
public Builder setParent(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The Filestore Instance to create the snapshots of, in the format
* `projects/{project_id}/locations/{location}/instances/{instance_id}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearParent() {
parent_ = getDefaultInstance().getParent();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The Filestore Instance to create the snapshots of, in the format
* `projects/{project_id}/locations/{location}/instances/{instance_id}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for parent to set.
* @return This builder for chaining.
*/
public Builder setParentBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
// --- snapshot_id (proto field 2) -----------------------------------------
// Stored as Object so the field can lazily hold either a String or a
// ByteString; whichever representation is requested first is converted and
// cached in place (standard protobuf-generated lazy UTF-8 handling).
private java.lang.Object snapshotId_ = "";

/**
 * <pre>
 * Required. The ID to use for the snapshot.
 * The ID must be unique within the specified instance.
 *
 * This value must start with a lowercase letter followed by up to 62
 * lowercase letters, numbers, or hyphens, and cannot end with a hyphen.
 * </pre>
 *
 * <code>string snapshot_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
 *
 * @return The snapshotId.
 */
public java.lang.String getSnapshotId() {
  java.lang.Object ref = snapshotId_;
  if (!(ref instanceof java.lang.String)) {
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    // Cache the decoded String so later calls skip the UTF-8 decode.
    snapshotId_ = s;
    return s;
  } else {
    return (java.lang.String) ref;
  }
}

/**
 * <pre>
 * Required. The ID to use for the snapshot.
 * The ID must be unique within the specified instance.
 *
 * This value must start with a lowercase letter followed by up to 62
 * lowercase letters, numbers, or hyphens, and cannot end with a hyphen.
 * </pre>
 *
 * <code>string snapshot_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
 *
 * @return The bytes for snapshotId.
 */
public com.google.protobuf.ByteString getSnapshotIdBytes() {
  java.lang.Object ref = snapshotId_;
  if (ref instanceof String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    // Cache the encoded ByteString for subsequent byte-oriented access.
    snapshotId_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}

/**
 * <pre>
 * Required. The ID to use for the snapshot.
 * The ID must be unique within the specified instance.
 *
 * This value must start with a lowercase letter followed by up to 62
 * lowercase letters, numbers, or hyphens, and cannot end with a hyphen.
 * </pre>
 *
 * <code>string snapshot_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
 *
 * @param value The snapshotId to set.
 * @return This builder for chaining.
 */
public Builder setSnapshotId(java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  snapshotId_ = value;
  // Bit 0x00000002 tracks "snapshot_id has been explicitly set".
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}

/**
 * <pre>
 * Required. The ID to use for the snapshot.
 * The ID must be unique within the specified instance.
 *
 * This value must start with a lowercase letter followed by up to 62
 * lowercase letters, numbers, or hyphens, and cannot end with a hyphen.
 * </pre>
 *
 * <code>string snapshot_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
 *
 * @return This builder for chaining.
 */
public Builder clearSnapshotId() {
  // Reset to the default instance's value and clear the has-bit.
  snapshotId_ = getDefaultInstance().getSnapshotId();
  bitField0_ = (bitField0_ & ~0x00000002);
  onChanged();
  return this;
}

/**
 * <pre>
 * Required. The ID to use for the snapshot.
 * The ID must be unique within the specified instance.
 *
 * This value must start with a lowercase letter followed by up to 62
 * lowercase letters, numbers, or hyphens, and cannot end with a hyphen.
 * </pre>
 *
 * <code>string snapshot_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
 *
 * @param value The bytes for snapshotId to set.
 * @return This builder for chaining.
 */
public Builder setSnapshotIdBytes(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  // proto3 strings must be valid UTF-8; reject malformed input eagerly.
  checkByteStringIsUtf8(value);
  snapshotId_ = value;
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}
// --- snapshot (proto field 3, message type) -------------------------------
// Either snapshot_ holds the value directly, or snapshotBuilder_ does once a
// nested builder has been requested; the two are mutually exclusive.
private com.google.cloud.filestore.v1beta1.Snapshot snapshot_;
private com.google.protobuf.SingleFieldBuilderV3<
        com.google.cloud.filestore.v1beta1.Snapshot,
        com.google.cloud.filestore.v1beta1.Snapshot.Builder,
        com.google.cloud.filestore.v1beta1.SnapshotOrBuilder>
    snapshotBuilder_;

/**
 * <pre>
 * Required. A snapshot resource
 * </pre>
 *
 * <code>
 * .google.cloud.filestore.v1beta1.Snapshot snapshot = 3 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 *
 * @return Whether the snapshot field is set.
 */
public boolean hasSnapshot() {
  // Bit 0x00000004 tracks "snapshot has been explicitly set".
  return ((bitField0_ & 0x00000004) != 0);
}

/**
 * <pre>
 * Required. A snapshot resource
 * </pre>
 *
 * <code>
 * .google.cloud.filestore.v1beta1.Snapshot snapshot = 3 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 *
 * @return The snapshot.
 */
public com.google.cloud.filestore.v1beta1.Snapshot getSnapshot() {
  if (snapshotBuilder_ == null) {
    // Never return null: fall back to the message's default instance.
    return snapshot_ == null
        ? com.google.cloud.filestore.v1beta1.Snapshot.getDefaultInstance()
        : snapshot_;
  } else {
    return snapshotBuilder_.getMessage();
  }
}

/**
 * <pre>
 * Required. A snapshot resource
 * </pre>
 *
 * <code>
 * .google.cloud.filestore.v1beta1.Snapshot snapshot = 3 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
public Builder setSnapshot(com.google.cloud.filestore.v1beta1.Snapshot value) {
  if (snapshotBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    snapshot_ = value;
  } else {
    // Builder active: delegate storage to it.
    snapshotBuilder_.setMessage(value);
  }
  bitField0_ |= 0x00000004;
  onChanged();
  return this;
}

/**
 * <pre>
 * Required. A snapshot resource
 * </pre>
 *
 * <code>
 * .google.cloud.filestore.v1beta1.Snapshot snapshot = 3 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
public Builder setSnapshot(
    com.google.cloud.filestore.v1beta1.Snapshot.Builder builderForValue) {
  if (snapshotBuilder_ == null) {
    snapshot_ = builderForValue.build();
  } else {
    snapshotBuilder_.setMessage(builderForValue.build());
  }
  bitField0_ |= 0x00000004;
  onChanged();
  return this;
}

/**
 * <pre>
 * Required. A snapshot resource
 * </pre>
 *
 * <code>
 * .google.cloud.filestore.v1beta1.Snapshot snapshot = 3 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
public Builder mergeSnapshot(com.google.cloud.filestore.v1beta1.Snapshot value) {
  if (snapshotBuilder_ == null) {
    // Merge field-by-field only when an existing non-default value is set;
    // otherwise simply adopt the incoming message.
    if (((bitField0_ & 0x00000004) != 0)
        && snapshot_ != null
        && snapshot_ != com.google.cloud.filestore.v1beta1.Snapshot.getDefaultInstance()) {
      getSnapshotBuilder().mergeFrom(value);
    } else {
      snapshot_ = value;
    }
  } else {
    snapshotBuilder_.mergeFrom(value);
  }
  if (snapshot_ != null) {
    bitField0_ |= 0x00000004;
    onChanged();
  }
  return this;
}

/**
 * <pre>
 * Required. A snapshot resource
 * </pre>
 *
 * <code>
 * .google.cloud.filestore.v1beta1.Snapshot snapshot = 3 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
public Builder clearSnapshot() {
  bitField0_ = (bitField0_ & ~0x00000004);
  snapshot_ = null;
  if (snapshotBuilder_ != null) {
    // Release the nested builder so future sets store directly again.
    snapshotBuilder_.dispose();
    snapshotBuilder_ = null;
  }
  onChanged();
  return this;
}

/**
 * <pre>
 * Required. A snapshot resource
 * </pre>
 *
 * <code>
 * .google.cloud.filestore.v1beta1.Snapshot snapshot = 3 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
public com.google.cloud.filestore.v1beta1.Snapshot.Builder getSnapshotBuilder() {
  // Requesting a builder implies the field is (or will be) set.
  bitField0_ |= 0x00000004;
  onChanged();
  return getSnapshotFieldBuilder().getBuilder();
}

/**
 * <pre>
 * Required. A snapshot resource
 * </pre>
 *
 * <code>
 * .google.cloud.filestore.v1beta1.Snapshot snapshot = 3 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
public com.google.cloud.filestore.v1beta1.SnapshotOrBuilder getSnapshotOrBuilder() {
  if (snapshotBuilder_ != null) {
    return snapshotBuilder_.getMessageOrBuilder();
  } else {
    return snapshot_ == null
        ? com.google.cloud.filestore.v1beta1.Snapshot.getDefaultInstance()
        : snapshot_;
  }
}

/**
 * <pre>
 * Required. A snapshot resource
 * </pre>
 *
 * <code>
 * .google.cloud.filestore.v1beta1.Snapshot snapshot = 3 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
private com.google.protobuf.SingleFieldBuilderV3<
        com.google.cloud.filestore.v1beta1.Snapshot,
        com.google.cloud.filestore.v1beta1.Snapshot.Builder,
        com.google.cloud.filestore.v1beta1.SnapshotOrBuilder>
    getSnapshotFieldBuilder() {
  if (snapshotBuilder_ == null) {
    // Lazily create the field builder, seeding it with the current value;
    // from then on snapshotBuilder_ owns the state (snapshot_ is cleared).
    snapshotBuilder_ =
        new com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.filestore.v1beta1.Snapshot,
            com.google.cloud.filestore.v1beta1.Snapshot.Builder,
            com.google.cloud.filestore.v1beta1.SnapshotOrBuilder>(
            getSnapshot(), getParentForChildren(), isClean());
    snapshot_ = null;
  }
  return snapshotBuilder_;
}
// Unknown-field handling is inherited unchanged from GeneratedMessageV3;
// these overrides exist only to make the methods final on this Builder.
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.setUnknownFields(unknownFields);
}

@java.lang.Override
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.filestore.v1beta1.CreateSnapshotRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.filestore.v1beta1.CreateSnapshotRequest)
// Shared immutable default instance; all unset message-typed fields and
// parsers reference this singleton.
private static final com.google.cloud.filestore.v1beta1.CreateSnapshotRequest DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.cloud.filestore.v1beta1.CreateSnapshotRequest();
}

public static com.google.cloud.filestore.v1beta1.CreateSnapshotRequest getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Wire-format parser. On any parse failure the partially built message is
// attached to the thrown InvalidProtocolBufferException so callers can
// inspect what was decoded before the error.
private static final com.google.protobuf.Parser<CreateSnapshotRequest> PARSER =
    new com.google.protobuf.AbstractParser<CreateSnapshotRequest>() {
      @java.lang.Override
      public CreateSnapshotRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          // Wrap plain I/O failures in the protobuf exception type expected
          // by callers of parsePartialFrom.
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

public static com.google.protobuf.Parser<CreateSnapshotRequest> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<CreateSnapshotRequest> getParserForType() {
  return PARSER;
}
// Instance-level accessor for the shared default instance (Message contract).
@java.lang.Override
public com.google.cloud.filestore.v1beta1.CreateSnapshotRequest getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
|
apache/tapestry-5 | 36,430 | tapestry-core/src/test/java/org/apache/tapestry5/integration/app1/FormTests.java | // Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package org.apache.tapestry5.integration.app1;
import java.text.SimpleDateFormat;
import java.util.Date;
import org.apache.tapestry5.corelib.components.Form;
import org.openqa.selenium.By;
import org.openqa.selenium.WebElement;
import org.openqa.selenium.support.ui.ExpectedCondition;
import org.openqa.selenium.support.ui.ExpectedConditions;
import org.testng.annotations.Test;
/**
* Tests for the {@link Form} component as well as many form control components.
*/
public class FormTests extends App1TestCase
{
/** Verifies that a Form's informal enctype parameter overrides the rendered attribute. */
@Test
public void form_encoding_type()
{
    openLinks("Form Encoding Type");

    assertAttribute("//form/@enctype", "x-override");
}

/** Page activation context must survive a round trip through the form's t:ac field. */
@Test
public void page_context_in_form()
{
    openLinks("Page Context in Form");

    assertTextSeries("//div[@id='results']//li[%d]", 1, "betty", "wilma", "context with spaces", "context/with/slashes");
    assertFieldValue("t:ac", "betty/wilma/context$0020with$0020spaces/context$002fwith$002fslashes");

    clickAndWait(SUBMIT);

    assertTextSeries("//div[@id='results']//li[%d]", 1, "betty", "wilma", "context with spaces", "context/with/slashes");
    assertFieldValue("t:ac", "betty/wilma/context$0020with$0020spaces/context$002fwith$002fslashes");
}

/** PasswordField must round-trip the value server-side but never re-render it. */
@Test
public void password_field()
{
    openLinks("PasswordFieldDemo");

    type("userName", "howard");
    type("password", "wrong-password");

    clickAndWait(SUBMIT);

    assertFieldValue("userName", "howard");

    // Verify that password fields do not render a non-blank password, even
    // when it is known.

    assertFieldValue("password", "");

    assertTextPresent("[howard]");
    assertTextPresent("[wrong-password]");

    type("password", "tapestry");

    clickAndWait(SUBMIT);

    assertTextPresent("You have provided the correct user name and password.");
}

/** Server-side required/integer validation and error re-rendering for text fields. */
@Test
public void server_side_validation_for_textfield_and_textarea() throws Exception
{
    openLinks("ValidForm");

    clickAndWait(SUBMIT);
    assertTextPresent("You must provide a value for Email.");

    // is an overridden validation error message:
    assertTextPresent("Please provide a detailed description of the incident.");

    type("email", "foo@bar.baz");
    type("message", "Show me the money!");
    type("hours", "foo");

    clickAndWait(SUBMIT);

    assertTextPresent("[false]");
    assertTextPresent("You must provide an integer value for Hours.");

    // Rejected raw input must be re-rendered so the user can correct it.
    assertAttribute("//input[@id='hours']/@value", "foo");

    type("hours", " 19 ");
    click("//input[@id='urgent']");

    clickAndWait(SUBMIT);

    assertTextPresent("[foo@bar.baz]");
    assertTextPresent("[Show me the money!]");
    assertTextPresent("[true]");
    assertTextPresent("[19]");
}
/** Client-side (JavaScript) validation blocks submission until fields are valid. */
@Test
public void client_side_validation()
{
    openLinks("Client Validation Demo");

    clickAndWait("link=Reset Page State");

    // Notice: click, not click and wait.

    click(SUBMIT);

    assertTextPresent("You must provide a value for First Name.");

    type("firstName", "Howard");
    type("lastName", "Lewis Ship");
    type("birthYear", "1000");
    type("password", "supersecret");

    click(SUBMIT);

    type("birthYear", "1966");
    click("citizen");

    clickAndWait(SUBMIT);

    assertTextPresent("Howard", "Lewis Ship", "1966", "U.S. Citizen");
}

/** A cancel-mode submit must bypass client validation and fire the cancel event. */
@Test
public void cancel_button()
{
    openLinks("Client Validation Demo");

    // Used to ensure that the <script> tag was present, but that's hard to
    // do with script combining enabled.

    clickAndWait("link=Clear Data");

    clickAndWait("//input[@value='Cancel']");

    assertText("message", "Form was cancelled.");
}

/** The "required" validator applies to a RadioGroup as a whole. */
@Test
public void radio_group_validator()
{
    openLinks("RadioDemo", "reset");

    // Verify that the "required" validator works.
    clickAndWait(SUBMIT);

    assertTextPresent("You must provide a value for Department.");
}

// This test has been disabled because the use of the pattern attribute
// by the Regexp validator, prevents the form from submitting.
@Test(enabled=false)
public void regexp_validator()
{
    openLinks("Regexp Demo");

    String update = SUBMIT;

    type("zipCode", "abc");

    click(update); // but don't wait

    assertTextPresent("A zip code consists of five or nine digits, eg: 02134 or 90125-4472.");

    type("zipCode", "12345");

    clickAndWait(update);

    assertTextPresent("Zip code: [12345]");

    type("zipCode", "12345-9876");

    clickAndWait(update);

    assertTextPresent("Zip code: [12345-9876]");
}
/** DateField parses locale-specific input and re-formats it on render. */
@Test
public void basic_datefield()
{
    openLinks("DateField Demo", "Reset Page State", "english");

    type("birthday", "24 dec 1966");
    type("asteroidImpact", "05/28/2046");

    clickAndWait(SUBMIT);

    assertText("birthday-output", "12/24/1966");
    assertText("impact-output", "05/28/2046");

    assertFieldValue("birthday", "24 Dec 1966");
    assertFieldValue("asteroidImpact", "5/28/2046");

    // Popup calendar must honor the newly selected (French) locale.
    clickAndWait("link=french");

    click("css=.x-birthday .btn");

    waitForAjaxRequestsToComplete();

    assertText("//A[@class='topLabel']", "1966 d\u00e9cembre");
}

// TAP5-2197
/** DateField is strict (non-lenient) by default; leniency is opt-in via parameter. */
@Test
public void datefield_leniency()
{
    openLinks("DateField Demo", "Reset Page State", "english");

    type("asteroidImpact", "00/00/0000");
    type("lenient", "00/00/0000");

    clickAndWait(SUBMIT);

    // By default, DateField is not lenient anymore
    assertText("css=div.x-impact p.help-block", "Date value '00/00/0000' is not parseable.");

    // But this one is configured as such by setting the "lenient" parameter to true.
    assertFalse(isElementPresent("css=div.x-lenient p.help-block"));

    // Check whether a String coerced to a DateFormat results in a lenient or not instance
    // according to the SymbolConstants.LENIENT_DATE_FORMAT symbol (default false)
    assertText("coercedStringToDateFormatLenient", "false");
}

// TAP5-1057
/** Unparseable (malicious) input must be escaped, not reflected as live markup. */
@Test
public void xss_datefield()
{
    openLinks("DateField Demo", "Reset Page State", "english");

    type("asteroidImpact", "<script>alert('T5 is great'); </script>");

    click("css=.x-impact .btn");

    waitForAjaxRequestsToComplete();

    assertSourcePresent("Unparseable date: \"<script>alert('T5 is great'); </script>\"");
}
// TAP5-1409
/**
 * Navigating the popup calendar to another month must not carry the selected-day
 * highlight along; also exercises the Today and None footer buttons.
 */
@Test
public void datefield_select_newmonth_samedate()
{
    openLinks("DateField Demo", "Reset Page State", "english");
    //start with a known date...
    type("asteroidImpact", "05/28/2035");

    click("css=.x-impact .btn");

    ExpectedCondition datePickerVisible = ExpectedConditions.visibilityOfElementLocated(By.cssSelector("div.datePicker"));
    waitForCondition(datePickerVisible);

    assertEquals(getText("css=td.selected"), "28");

    //move to the next month.
    click("css=button.nextButton");

    //first, make sure that NOTHING shows as selected! The selected date is still 5/28/46
    waitForSelectedToBeRemoved();

    //make sure it's still selected if we navigate back...
    click("css=button.previousButton");

    waitForCssSelectorToAppear("td.selected");

    click("css=button.nextButton");

    waitForSelectedToBeRemoved();

    click("xpath=//td[text()='28']");

    String pickerGoneSelector = "css=div.datePicker";
    waitForInvisible(pickerGoneSelector);

    assertFieldValue("asteroidImpact", "6/28/2035");

    //a few other behaviors to check on as a side-effect of implementing the fix for 1409:
    //1) If today is selected and it's the current month, pressing the "Today" button should close the popup
    //2) If today is selected and we're on some other month, pressing the "Today" button should just take us
    //   back to the today.
    //3) If today is not selected, pressing the "Today" button should set the date and close the popup.
    //4) Pressing the "None" button should always close the popup and result in no date.

    //#3
    click("css=.x-impact .btn");
    waitForCondition(datePickerVisible);

    click("css=div.datePicker .footerTable button");
    waitForInvisible(pickerGoneSelector);

    String value = getValue("asteroidImpact");

    assertFieldValue("asteroidImpact", new SimpleDateFormat("M/d/yyyy").format(new Date()));

    //#2...
    click("css=.x-impact .btn");
    waitForCondition(datePickerVisible);

    click("css=button.nextButton");
    waitForSelectedToBeRemoved();

    click("css=div.datePicker .footerTable button");
    waitForCssSelectorToAppear("td.selected");

    //#1
    click("css=div.datePicker .footerTable button");
    waitForInvisible(pickerGoneSelector);

    assertFieldValue("asteroidImpact", value);

    //#4...
    click("css=.x-impact .btn");
    waitForCondition(datePickerVisible);

    String noneButton = "//button[text()='None']";
    click(noneButton);
    waitForInvisible(pickerGoneSelector);

    assertFieldValue("asteroidImpact", "");

    click("css=.x-impact .btn");
    waitForCondition(datePickerVisible);

    assertFalse(isElementPresent("css=td.selected"));

    click(noneButton);
    waitForInvisible(pickerGoneSelector);

    assertFieldValue("asteroidImpact", "");
}

/** Waits until no calendar day cell carries the "selected" CSS class. */
private void waitForSelectedToBeRemoved()
{
    waitForCondition("selenium.browserbot.getCurrentWindow().testSupport.findCSSMatchCount('td.selected') == 0", PAGE_LOAD_TIMEOUT);
}

// TAP5-1408, TAP5-2203
/**
 * The popup closes when clicking its trigger again or anywhere outside it,
 * but stays open when interacting with the month-label popup inside it.
 */
@Test
public void datefield_clickoutside_closes()
{
    openLinks("DateField Demo", "Reset Page State", "english");

    type("asteroidImpact", "05/28/2046");

    click("css=.x-impact .btn");

    ExpectedCondition datePickerVisible = ExpectedConditions.visibilityOfElementLocated(By.cssSelector("div.datePicker"));
    waitForCondition(datePickerVisible);

    click("css=.x-impact .btn");
    waitForInvisible("css=div.datePicker");

    //make sure that clicking somewhere outside the date picker
    //closes it

    click("css=.x-impact .btn");
    waitForCondition(datePickerVisible);

    click("css=h1");
    waitForInvisible("css=div.datePicker");

    //also make sure that clicking the month label /doesn't/ close the picker

    click("css=.x-impact .btn");
    waitForCondition(datePickerVisible);

    click("css=a.topLabel");
    waitForCssSelectorToAppear("div.labelPopup");

    click("css=div.labelPopup a");
    waitForInvisible("css=div.labelPopup");

    //It's basically impossible to express "wait until the popup doesn't disappear"
    //Instead, we take advantage of knowing that the datepicker disappears with this bug /almost/
    //immediately after picking the month label, so we sleep the test for a few seconds to provide
    //ample time for the bug to manifest.

    waitForAjaxRequestsToComplete();

    assertTrue(isVisible("css=div.datePicker"));
}
/**
 * A component event handler can perform custom translation ("zero" &lt;-&gt; 0)
 * and report custom parse errors via ValidationException.
 */
@Test
public void event_based_translate() throws Exception
{
    openLinks("EventMethod Translator");

    type("count", "123");
    clickAndWait(SUBMIT);

    assertTextPresent("Count: [123]");

    type("count", "0");
    clickAndWait(SUBMIT);

    assertTextPresent("Count: [0]");

    // The custom translator renders 0 back as the word "zero".
    assertFieldValue("count", "zero");

    type("count", "456");
    clickAndWait(SUBMIT);

    assertTextPresent("Count: [456]");
    assertFieldValue("count", "456");

    // Parsing is case-insensitive for the special word.
    type("count", "ZERO");
    clickAndWait(SUBMIT);

    assertTextPresent("Count: [0]");
    assertFieldValue("count", "zero");

    // Try the server-side custom exception reporting.

    type("count", "13");
    clickAndWait(SUBMIT);

    assertTextPresent("Event Handler Method Translate", "Thirteen is an unlucky number.");

    type("count", "i");
    clickAndWait(SUBMIT);

    assertTextPresent("Event Handler Method Translate", "Rational numbers only, please.");
}
/**
 * Verifies that clicking the labels of Radio components selects the
 * corresponding buttons, and that the selected enum values for both the
 * "department" and "position" groups reach the server on submit.
 */
@Test
public void radio_button_and_group()
{
    openLinks("RadioDemo");

    String update = SUBMIT;

    clickLabelContaining("Accounting");
    clickAndWait(update);
    assertTextPresent("Selected department: ACCOUNTING");

    clickLabelContaining("Sales And Marketin");
    clickAndWait(update);
    assertTextPresent("Selected department: SALES_AND_MARKETING");

    // not in a loop ...
    clickLabelContaining("Temp");
    clickAndWait(update);
    assertTextPresent("Selected position: TEMP");

    clickLabelContaining("Lifer");
    clickAndWait(update);
    assertTextPresent("Selected position: LIFER");
}

/**
 * Finds the first &lt;label&gt; element whose text contains the given
 * fragment, scrolls it into view, and clicks it. Extracted to remove the
 * four copy-pasted stream pipelines this test previously contained; also
 * scrolls consistently (the first copy omitted the scroll).
 *
 * @param text fragment expected somewhere in the label's text
 */
private void clickLabelContaining(String text)
{
    WebElement label = webDriver.findElements(By.cssSelector("label")).stream()
            .filter(element -> element.getText().contains(text))
            .findFirst()
            .get();
    scrollIntoView(label);
    label.click();
}
/**
 * Every form-control component must render a disabled attribute when its
 * disabled parameter is true, and disabled fields must be ignored on submit.
 */
@Test
public void disabled_fields() throws Exception
{
    openLinks("Disabled Fields");

    // The couple of places where there's a _0 suffix is related to
    // the fix for https://issues.apache.org/jira/browse/TAP5-1632

    String[] paths = new String[]
    {"//input[@id='textfield']",

            "//input[@id='passwordfield']",

            "//textarea[@id='textarea']",

            "//input[@id='checkbox']",

            "//select[@id='select_0']",

            "//input[@id='radio1']",

            "//input[@id='radio2']",

            "//input[@id='datefield']",

            "//div[@class='palette']//input[@type='hidden']",

            "//div[@class='palette-available']//select",

            "//div[@class='palette-selected']//select",

            "//input[@name='checklist']",

            // TAP5-2078
            "//input[@name='radiogroup']",

            "//input[@id='submit_0']"};

    for (String path : paths)
    {
        String locator = String.format("%s/@disabled", path);

        assertAttribute(locator, "disabled");
    }

    assertAttribute("css=div.palette .btn@disabled", "true");

    //TAP5-2078
    clickAndWait("//input[@value='Continue']");
    assertFalse(isTextPresent("This should not happen"));
}
/**
 * TAPESTRY-2056
 */
/** The "zero" null-field strategy renders 0 for null and converts blank input to 0. */
@Test
public void null_field_strategy()
{
    openLinks("Null Field Strategy Demo");

    String locator = "//span[@id='value']";

    assertText(locator, "");

    assertAttribute("//input[@id='number']/@value", "0");

    type("number", "");

    clickAndWait(SUBMIT);

    assertText(locator, "0");
}

/**
 * TAPESTRY-1647
 */
@Test
public void label_invokes_validation_decorator_at_correct_time()
{
    openLinks("Override Validation Decorator");

    // This is sub-optimal, as it doesn't ensure that the before/after field
    // values really do wrap around
    // the field (they do, but that's hard to prove!). It is also susceptible to
    // idiosyncrasies around how Tapestry renders attributes, and how the browser
    // represents them.

    // Along the way we are also testing:
    // - primitive types are automatically required
    // - AbstractTextField.isRequired() and the logic inside
    // ComponentFieldValidator.isRequired()

    assertSourcePresent(
            "[Before label for Value]<label for=\"value\" id=\"value-label\" class=\"control-label\">Value</label>[After label for Value]",
            "[Before field Value]",
            "[After field Value (optional)]",
            "[Before label for Required Value]<label for=\"requiredValue\" id=\"requiredValue-label\" class=\"control-label\">Required Value</label>[After label for Required Value]",
            "[Before field Required Value]", "[After field Required Value (required)]");
}

/**
 * TAPESTRY-2085
 */
/** Wrapper types (Integer vs. int) allow null: blank input yields null, not 0. */
@Test
public void wrapper_types_with_text_field()
{
    openLinks("TextField Wrapper Types", "Reset Page State");

    assertFieldValue("count", "");
    assertText("value", "null");

    type("count", "0");
    clickAndWait(SUBMIT);

    assertFieldValue("count", "0");
    assertText("value", "0");

    type("count", "1");
    clickAndWait(SUBMIT);

    assertFieldValue("count", "1");
    assertText("value", "1");

    clickAndWait("link=clear");

    assertFieldValue("count", "");
    assertText("value", "null");
}

/** A Submit component's context is passed to the "selected" event handler. */
@Test
public void submit_with_context()
{
    openLinks("Submit With Context");

    clickAndWait(SUBMIT);

    assertTextPresent("Result: 10.14159");
}
/**
 * TAPESTRY-2563
 */
/** Submitting a form action via GET (no t:formdata) must produce a clear error. */
@Test
public void form_action_via_get()
{
    open(getBaseURL() + "validform.form", "true");

    assertTextPresent("Forms require that the request method be POST and that the t:formdata query parameter have values.");
}

/**
 * TAPESTRY-2352
 */
/** Client-side numeric validation messages, including a per-field override. */
@Test
public void client_field_format_validation()
{
    openLinks("Client Format Validation");

    type("amount", "abc");
    type("quantity", "abc");

    click(SUBMIT);

    assertTextPresent("You must provide a numeric value for Amount.",
            "Provide quantity as a number.");
}

/**
 * TAPESTRY-2438
 */
/** A ValidationException thrown from a validate-event handler becomes a form error. */
@Test
public void validation_exception_thrown_from_validate_form_event_handler()
{
    openLinks("ValidationForm ValidationException Demo");

    clickAndWait(SUBMIT);

    assertTextPresent("From event handler method.");

    assertText("event", "failure");
}

/** A form field rendered outside any Form must fail fast with a clear exception. */
@Test
public void form_field_outside_form()
{
    openLinks("Form Field Outside Form");

    assertTextPresent(
            "org.apache.tapestry5.internal.services.RenderQueueException",
            "Render queue error in SetupRender[FormFieldOutsideForm:textfield]: Component FormFieldOutsideForm:textfield must be enclosed by a Form component.",
            "context:FormFieldOutsideForm.tml, line 5");
}

/**
 * TAP5-281
 */
@Test
public void nested_form_check()
{
    openLinks("Nested Form Demo");

    assertTextPresent("Form components may not be placed inside other Form components.");
}
/**
 * TAP5-87
 */
/** Submitting a blank password leaves the server-side value unchanged. */
@Test
public void blank_password_does_not_update()
{
    openLinks("Blank Password Demo");

    type("password", "secret");

    clickAndWait(SUBMIT);

    assertFieldValue("password", "");

    assertText("visiblepassword", "secret");

    clickAndWait(SUBMIT);

    assertFieldValue("password", "");

    assertText("visiblepassword", "secret");
}

/**
 * TAP5-228: And to think I almost blew off the integration tests!
 */
/** Two forms on one page can define different messages and constraints for the same field name. */
@Test
public void per_form_validation_messages_and_constraints()
{
    openLinks("Per-Form Validation Messages");

    clickAndWait("//input[@type='submit' and @value='Login']");

    assertTextPresent("Enter the unique user id you provided when you registerred.");

    type("userId", "aaa");

    clickAndWait("//input[@type='submit' and @value='Login']");

    assertTextPresent("You must provide at least 10 characters for User Id.");

    clickAndWait("//input[@type='submit' and @value='Register']");

    assertTextPresent("Enter a unique user id, such as your initials.");

    type("userId_0", "aaa");

    clickAndWait("//input[@type='submit' and @value='Register']");

    assertTextPresent("You must provide at least 20 characters for User Id.");
}

/**
 * TAP5-719
 */
/** A LinkSubmit with no validators submits the enclosing form directly. */
@Test
public void link_submit_without_validator()
{
    openLinks("LinkSubmit Without Validator Demo");

    type("searchField", "Anders Haraldsson");

    clickAndWait("//a[@id='searchLink']");

    assertTextPresent("Result: Anders Haraldsson not found!");
}
/**
 * TAP5-211
 */
/**
 * Locale-aware numeric parsing/formatting: grouping and decimal separators
 * must follow the active locale (English vs. German) on both input and output.
 */
@Test
public void client_side_numeric_validation()
{
    openLinks("Client-Side Numeric Validation", "Reset Page State", "Setup Values");

    assertText("outputLongValue", "1000");
    assertText("outputDoubleValue", "1234.67");

    assertFieldValue("longValue", "1000");
    assertFieldValue("doubleValue", "1,234.67");

    type("longValue", "2,000 ");
    type("doubleValue", " -456,789.12");

    clickAndWait(SUBMIT);

    assertText("outputLongValue", "2000");
    assertText("outputDoubleValue", "-456789.12");

    assertFieldValue("longValue", "2000");
    assertFieldValue("doubleValue", "-456,789.12");

    clickAndWait("link=Switch to German");

    assertText("outputLongValue", "2000");
    assertText("outputDoubleValue", "-456789.12");

    assertFieldValue("longValue", "2000");
    // German locale: "." groups thousands, "," is the decimal separator.
    assertFieldValue("doubleValue", "-456.789,12");

    type("longValue", "3.000");
    type("doubleValue", "5.444.333,22");

    clickAndWait(SUBMIT);

    assertFieldValue("longValue", "3000");
    assertFieldValue("doubleValue", "5.444.333,22");

    assertText("outputLongValue", "3000");
    assertText("outputDoubleValue", "5444333.22");

    clickAndWait("link=Setup Values");

    type("longValue", "4000.");
    click(SUBMIT);

    assertTextPresent("You must provide an integer value for Long Value.");

    type("doubleValue", "abc");

    click(SUBMIT);

    assertTextPresent("You must provide a numeric value for Double Value.");
}

/** Optional numeric fields still get client-side format validation when non-blank. */
@Test
public void client_validation_for_numeric_fields_that_are_not_required()
{
    openLinks("Form Zone Demo");

    type("longValue", "alpha");

    click(SUBMIT);

    assertTextPresent("You must provide an integer value for Long Value.");

    type("longValue", "37");

    click(SUBMIT);

    waitForElementToAppear("outputvalue");

    assertText("outputvalue", "37");
}
/** A Hidden component round-trips its value through the form submission. */
@Test
public void hidden_field()
{
    openLinks("Hidden Demo", "setup");

    clickAndWait(SUBMIT);

    assertText("stored", "12345");
}

/** Validation constraints (required, min, max) can be declared in the message catalog. */
@Test
public void validation_constraints_from_messages()
{
    openLinks("Validation Constraints From Messages");

    click(SUBMIT);

    assertTextPresent("You must provide a value for Name.");
    assertTextPresent("You must provide a value for Age.");

    type("name", "behemoth");
    type("age", "0");
    select("type", "label=Snake");

    click(SUBMIT);
    assertTextPresent("Age requires a value of at least 1.");

    type("age", "121");
    click(SUBMIT);
    assertTextPresent("Age requires a value no larger than 120.");

    type("age", "5");
    clickAndWait(SUBMIT);
}
/**
 * TAP5-157
 */
/** Multiple LinkSubmit components: client validation fires, and the clicked link is reported. */
@Test
public void link_submit_component()
{
    openLinks("LinkSubmit Demo");

    click("//a[@id='fred']");

    assertTextPresent("You must provide a value for Name.");

    type("name", "Wilma");

    clickAndWait("link=Fred");

    assertText("name-value", "Wilma");
    assertText("last-clicked", "Fred");
    assertTextPresent("Result: 10.14159");

    type("name", "Betty");
    clickAndWait("link=Barney");

    assertText("name-value", "Betty");
    assertText("last-clicked", "Barney");
}

/**
 * TAP5-2183
 */
/** Clicking an element nested inside a LinkSubmit still triggers the submit. */
@Test
public void link_submit_component_with_nested_element()
{
    openLinks("LinkSubmit Demo");

    type("name", "Wilma");

    clickAndWait("css=.glyphicon-star");

    assertText("name-value", "Wilma");
    assertText("last-clicked", "Pebbles");
}

/** A Calendar-typed property works inside a BeanEditor-generated form. */
@Test
public void calendar_field_inside_bean_editor() throws InterruptedException
{
    // openLinks("BeanEditor / Calendar Demo", "Reset Page State");
    open("/beaneditcalendardemo");
    clickAndWait("link=Reset Page State");

    type("calendar", "04/06/1978");

    Thread.sleep(1000); // Test seems to go too fast

    clickAndWait(SUBMIT);

    Thread.sleep(1000); // Test seems to go too fast

    assertTextPresent("Apr 6, 1978");

    // TAP5-1043
    clickAndWait("link=clear");
}

/** An image-typed submit input fires the Submit component's selected event. */
@Test
public void image_submit_triggers_selected_event()
{
    openLinks("Submit with an Image Demo");

    type("value", "barney gumble");

    clickAndWait("//input[@type='image']");

    assertText("outputvalue", "barney gumble");

    assertText("eventfired", "true");
}
/**
 * Tests for forms and form submissions and basic form control components.
 * also tests a few other things, such as
 * computed default bindings and invisible instrumentation.
 */
@Test
public void simple_form()
{
    openLinks("SimpleForm");

    assertText("//label[@for='disabled']", "Disabled");

    // This demonstrates TAPESTRY-1642:
    assertText("//label[@for='email']", "User Email");
    assertText("//label[@for='message']", "Incident Message");
    assertText("//label[@for='operatingSystem']", "Operating System");
    assertText("//label[@for='department']", "Department");
    assertText("//label[@for='urgent']", "Urgent Processing Requested");

    assertFieldValue("email", "");
    assertFieldValue("message", "");
    assertFieldValue("operatingSystem", "osx");
    assertFieldValue("department", "");
    assertFieldValue("urgent", "on");

    clickAndWait(SUBMIT);

    assertTextPresent("department: []");

    type("email", "foo@bar.baz");
    type("message", "Message for you, sir!");
    select("operatingSystem", "Windows NT");
    select("department", "R&D");
    click("urgent");

    clickAndWait(SUBMIT);

    assertFieldValue("email", "foo@bar.baz");
    assertFieldValue("message", "Message for you, sir!");
    assertFieldValue("urgent", "off");

    // Tried to use "email:" and "exact:email:" but Selenium 0.8.1 doesn't
    // seem to accept that.

    assertTextPresent("[foo@bar.baz]", "[Message for you, sir!]", "[false]", "[winnt]", "[RESEARCH_AND_DESIGN]");

    // Haven't figured out how to get selenium to check that fields are
    // disabled.
}
/**
 * TAP5-915
 */
/** A page can override the DateField validation messages via its message catalog. */
@Test
public void override_datefield_message_catalog()
{
    open(getBaseURL() + "overridedatefieldmessagecatalogdemo");

    type("birthday", "aaaaa");

    clickAndWait("//input[@type='submit' and @value='Go']");

    assertTextPresent("The input 'aaaaa' is not a valid date");
}

/**
 * TAP5-52.
 */
/** Each field reports its own error once, and errors clear independently as fields are fixed. */
@Test
public void single_error_message()
{
    open(getBaseURL() + "singleerrordemo");

    clickAndWait(SUBMIT);

    assertTextPresent("You must provide a value for Username");
    assertTextPresent("You must provide a value for Password");

    type("username", "Igor");

    clickAndWait(SUBMIT);

    assertFalse(isTextPresent("You must provide a value for Username"));
    assertTextPresent("You must provide a value for Password");

    type("password", "secret");

    clickAndWait(SUBMIT);

    assertFalse(isTextPresent("You must provide a value for Username"));
    assertFalse(isTextPresent("You must provide a value for Password"));
}

/**
 * TAP5-1024
 */
@Test
public void use_of_cancel_mode_on_submit_button()
{
    openLinks("Cancel Demo");

    clickAndWait("//input[@value='Cancel Form']");

    assertText("message", "Form was canceled.");
}

/** Same cancel-mode behavior, triggered through a SubmitLink instead of a button. */
@Test
public void use_of_cancel_mode_with_submitlink()
{
    openLinks("Cancel Demo");

    clickAndWait("link=Cancel Form");

    assertText("message", "Form was canceled.");
}
/**
 * A required Select reports a validation error when nothing is chosen, and
 * accepts a selection on resubmission.
 */
@Test
public void validation_decoration_for_select() throws Exception
{
openLinks("Select Demo");
clickAndWait(SUBMIT);
assertTextPresent("You must provide a value for Color.");
select("color", "label=Green");
clickAndWait(SUBMIT);
assertTextPresent("Selected color: Green");
}
/**
 * TAP5-1098: a SelectModel built from a list of objects plus a label property
 * name renders usable options.
 */
@Test
public void create_select_model_from_objects_and_property_name() throws Exception
{
openLinks("SelectModel from objects and property name");
select("track", "label=The Calling");
clickAndWait(SUBMIT);
assertTextPresent("Selected track: The Calling, Synaesthetic");
}
/** Same scenario with a SelectModel built directly from the objects. */
@Test
public void create_select_model_from_objects() throws Exception
{
openLinks("SelectModel from objects");
select("track", "label=The Calling");
clickAndWait(SUBMIT);
assertTextPresent("Selected track: The Calling, Synaesthetic");
}
/** Same scenario with the SelectModel produced via type coercion. */
@Test
public void create_select_model_coercion() throws Exception
{
openLinks("SelectModel coercion");
waitForElementToAppear("track");
select("track", "label=The Calling");
clickAndWait(SUBMIT);
assertTextPresent("Selected track: The Calling, Synaesthetic");
}
/**
 * A validator macro (required + length constraints) is applied to both
 * password fields; each submission exercises a different violation until the
 * final one passes.
 */
@Test
public void validation_macro() throws Exception
{
openLinks("Validator Macro Demo");
// Empty form: "required" fires for both fields.
clickAndWait(SUBMIT);
assertTextPresent("You must provide a value for Password.");
assertTextPresent("You must provide a value for Password2.");
// Password too long, password2 too short.
type("password", "abcdefg");
type("password2", "x");
clickAndWait(SUBMIT);
assertTextPresent("You may provide at most 3 characters for Password.");
assertTextPresent("You must provide at least 2 characters for Password2.");
// Violations swapped: password too short, password2 too long.
type("password", "a");
type("password2", "wxyz");
clickAndWait(SUBMIT);
assertTextPresent("You must provide at least 2 characters for Password.");
assertTextPresent("You may provide at most 3 characters for Password2.");
// Both within the 2..3 character window: form succeeds.
type("password", "ab");
type("password2", "xyz");
clickAndWait(SUBMIT);
assertTextPresent("Password: ab");
assertTextPresent("Password2: xyz");
}
/**
 * Checklist component: required validation when nothing is checked, and the
 * selected set tracks subsequent check/uncheck operations across submissions.
 */
@Test
public void checklist_select() throws Exception
{
openLinks("Checklist Demo");
clickAndWait(SUBMIT);
assertTextPresent("You must provide a value for Color.");
check("//input[@value='Green']");
clickAndWait(SUBMIT);
assertTextPresent("Selected colors: [Green]");
check("//input[@value='Red']");
clickAndWait(SUBMIT);
assertTextPresent("Selected colors: [Green, Red]");
check("//input[@value='Blue']");
uncheck("//input[@value='Green']");
clickAndWait(SUBMIT);
assertTextPresent("Selected colors: [Blue, Red]");
}
/**
 * A parameter containing special characters survives round-trips both through
 * an action link and through a form submission (rendered as a hidden input).
 */
@Test
public void checkFormLinkParameters() throws Exception
{
openLinks("FormLinkParameters Demo");
assertAttribute("//input[@name='myparam']/@value", "!@#$%^&*()_+=");
clickAndWait("link=SetValue");
assertTextPresent("Result = '!@#$%^&*()_+='");
clickAndWait(SUBMIT);
assertTextPresent("Result = '!@#$%^&*()_+='");
}
/**
 * TAP5-2223: a Select using option groups binds the chosen option back to the
 * entity (verified via its id) on submission.
 */
@Test
public void optionGroup_form()
{
openLinks("OptionGroupForm Demo");
assertTextPresent("entity.id: [1]");
select("entity", "label2");
clickAndWait(SUBMIT);
assertTextPresent("entity.id: [2]");
}
/**
 * TAP5-2331: fields with an explicit clientId parameter keep those exact ids,
 * including after repeated Ajax zone updates.
 */
@Test
public void form_fields_client_id_parameter()
{
final String[] clientIds = {"clientId-0", "clientId-1"};
openLinks("Form Field clientId Parameter Demo");
// Re-check after each of several zone refreshes to catch id drift.
for (int i = 0; i < 4; i++) {
for (String clientId : clientIds)
{
assertTrue(selenium.isElementPresent(clientId));
}
click("updateZone");
waitForAjaxRequestsToComplete();
}
}
/**
 * TAP5-2301: changing the first select triggers an Ajax multi-zone update
 * inside a form; the dependent select's option text must reflect the
 * post-Ajax state.
 */
@Test
public void select_context() {
openLinks("MultiZone Update inside a Form");
selenium.select("selectValue1", "label=3 pre ajax");
waitForAjaxRequestsToComplete();
assertEquals(
"4 post ajax, number 013, retention policy RUNTIME",
selenium.getText("//select[@id='selectValue2']/option"));
}
/** TAP5-1815. In this webapp, HTML5 support is disabled, so we check whether it actually is disabled */
@Test
public void html5_support_disabled() throws Exception
{
openLinks("ValidForm");
// With HTML5 support off, the email validator must not upgrade the input type.
assertEquals("text", getAttribute("emailValidator@type")); // if HTML5 support was enabled, this would be "email"
}
/**
 * TAP5-736: binding a null validate parameter to a TextField must produce a
 * clear "not allowed to be null" error, never a raw NullPointerException.
 */
@Test
public void textfield_requires_non_null_validate_parameter() throws Exception
{
openLinks("TextField with null validate parameter");
if(isTextPresent("java.lang.NullPointerException")){
reportAndThrowAssertionError("Unexpected NullPointerException was thrown");
}
assertTextPresent("This parameter is not allowed to be null.");
}
/**
 * TAP5-2467: markup produced while handling the form "error" event (after an
 * Ajax submit) is rendered into the page.
 */
@Test
public void validate_in_error_event() {
openLinks("Validate in error Event");
click(SUBMIT);
waitForElementToAppear("validate-in-error");
assertTextPresent("Validate in error");
}
/**
 * TAP5-2075: the "checked" validator rejects an unchecked checkbox and then
 * accepts it once checked.
 */
@Test
public void validate_checkbox_must_be_checked()
{
openLinks("Validate Checkbox Must Be Checked");
clickAndWait(SUBMIT);
assertTextPresent("You must check Checkbox.");
check("//input[@type='checkbox']");
clickAndWait(SUBMIT);
assertTextPresent("Checkbox's value: true");
}
/**
 * TAP5-2075: the mirror-image "unchecked" validator rejects a checked
 * checkbox and then accepts it once unchecked.
 */
@Test
public void validate_checkbox_must_be_unchecked()
{
openLinks("Validate Checkbox Must Be Unchecked");
check("//input[@type='checkbox']");
clickAndWait(SUBMIT);
assertTextPresent("You must uncheck Checkbox.");
uncheck("//input[@type='checkbox']");
clickAndWait(SUBMIT);
assertTextPresent("Checkbox's value: false");
}
/**
 * TAP5-2204: a Select with automatic value-security and a model that is not
 * persisted between requests still accepts a submitted option.
 */
@Test
public void select_model_with_auto_security_and_non_persistent_model() throws Exception
{
openLinks("Select Demo");
select("month", "label=August");
clickAndWait(SUBMIT);
assertTextPresent("Selected month: August");
}
}
|
googleapis/google-cloud-java | 37,763 | java-discoveryengine/proto-google-cloud-discoveryengine-v1alpha/src/main/java/com/google/cloud/discoveryengine/v1alpha/ListChunksResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/discoveryengine/v1alpha/chunk_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.discoveryengine.v1alpha;
/**
*
*
* <pre>
* Response message for
* [ChunkService.ListChunks][google.cloud.discoveryengine.v1alpha.ChunkService.ListChunks]
* method.
* </pre>
*
* Protobuf type {@code google.cloud.discoveryengine.v1alpha.ListChunksResponse}
*/
public final class ListChunksResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.discoveryengine.v1alpha.ListChunksResponse)
ListChunksResponseOrBuilder {
// NOTE(review): generated protobuf code (protoc, protobuf-java 3.25.8) — do not hand-edit;
// regenerate from chunk_service.proto instead.
private static final long serialVersionUID = 0L;
// Use ListChunksResponse.newBuilder() to construct.
private ListChunksResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
// Default state: empty chunk list and empty page token.
private ListChunksResponse() {
chunks_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListChunksResponse();
}
// Message descriptor, looked up from the generated ChunkServiceProto holder class.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.discoveryengine.v1alpha.ChunkServiceProto
.internal_static_google_cloud_discoveryengine_v1alpha_ListChunksResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.discoveryengine.v1alpha.ChunkServiceProto
.internal_static_google_cloud_discoveryengine_v1alpha_ListChunksResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.discoveryengine.v1alpha.ListChunksResponse.class,
com.google.cloud.discoveryengine.v1alpha.ListChunksResponse.Builder.class);
}
// ---- Message-side accessors for repeated field `chunks` (field number 1). ----
public static final int CHUNKS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.discoveryengine.v1alpha.Chunk> chunks_;
/**
*
*
* <pre>
* The [Chunk][google.cloud.discoveryengine.v1alpha.Chunk]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1alpha.Chunk chunks = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.discoveryengine.v1alpha.Chunk> getChunksList() {
return chunks_;
}
/**
*
*
* <pre>
* The [Chunk][google.cloud.discoveryengine.v1alpha.Chunk]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1alpha.Chunk chunks = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.discoveryengine.v1alpha.ChunkOrBuilder>
getChunksOrBuilderList() {
return chunks_;
}
/**
*
*
* <pre>
* The [Chunk][google.cloud.discoveryengine.v1alpha.Chunk]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1alpha.Chunk chunks = 1;</code>
*/
@java.lang.Override
public int getChunksCount() {
return chunks_.size();
}
/**
*
*
* <pre>
* The [Chunk][google.cloud.discoveryengine.v1alpha.Chunk]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1alpha.Chunk chunks = 1;</code>
*/
@java.lang.Override
public com.google.cloud.discoveryengine.v1alpha.Chunk getChunks(int index) {
return chunks_.get(index);
}
/**
*
*
* <pre>
* The [Chunk][google.cloud.discoveryengine.v1alpha.Chunk]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1alpha.Chunk chunks = 1;</code>
*/
@java.lang.Override
public com.google.cloud.discoveryengine.v1alpha.ChunkOrBuilder getChunksOrBuilder(int index) {
return chunks_.get(index);
}
// ---- Message-side accessors for `next_page_token` (field number 2). ----
// The field holds either a String or a ByteString; each accessor lazily
// converts and caches the other representation back into nextPageToken_.
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token that can be sent as
* [ListChunksRequest.page_token][google.cloud.discoveryengine.v1alpha.ListChunksRequest.page_token]
* to retrieve the next page. If this field is omitted, there are no
* subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* A token that can be sent as
* [ListChunksRequest.page_token][google.cloud.discoveryengine.v1alpha.ListChunksRequest.page_token]
* to retrieve the next page. If this field is omitted, there are no
* subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// ---- Serialization plumbing: initialization check, wire write, size. ----
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
// No required fields in this message, so always initialized.
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < chunks_.size(); i++) {
output.writeMessage(1, chunks_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
// Size is memoized; -1 means "not yet computed".
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < chunks_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, chunks_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
// Value equality over chunks, next_page_token, and unknown fields.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.discoveryengine.v1alpha.ListChunksResponse)) {
return super.equals(obj);
}
com.google.cloud.discoveryengine.v1alpha.ListChunksResponse other =
(com.google.cloud.discoveryengine.v1alpha.ListChunksResponse) obj;
if (!getChunksList().equals(other.getChunksList())) return false;
if (!getNextPageToken().equals(other.getNextPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
// Hash is memoized (0 means "not yet computed") and mixes field numbers with
// field values, matching the generated-code convention.
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getChunksCount() > 0) {
hash = (37 * hash) + CHUNKS_FIELD_NUMBER;
hash = (53 * hash) + getChunksList().hashCode();
}
hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getNextPageToken().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// ---- Standard generated parseFrom/parseDelimitedFrom overloads, all
// delegating to PARSER (with optional ExtensionRegistryLite variants). ----
public static com.google.cloud.discoveryengine.v1alpha.ListChunksResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.discoveryengine.v1alpha.ListChunksResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.discoveryengine.v1alpha.ListChunksResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.discoveryengine.v1alpha.ListChunksResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.discoveryengine.v1alpha.ListChunksResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.discoveryengine.v1alpha.ListChunksResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.discoveryengine.v1alpha.ListChunksResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.discoveryengine.v1alpha.ListChunksResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.discoveryengine.v1alpha.ListChunksResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.discoveryengine.v1alpha.ListChunksResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.discoveryengine.v1alpha.ListChunksResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.discoveryengine.v1alpha.ListChunksResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
// ---- Builder factory methods. ----
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.discoveryengine.v1alpha.ListChunksResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
// For the default instance, a fresh empty builder avoids a useless merge.
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Response message for
* [ChunkService.ListChunks][google.cloud.discoveryengine.v1alpha.ChunkService.ListChunks]
* method.
* </pre>
*
* Protobuf type {@code google.cloud.discoveryengine.v1alpha.ListChunksResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.discoveryengine.v1alpha.ListChunksResponse)
com.google.cloud.discoveryengine.v1alpha.ListChunksResponseOrBuilder {
// ---- Builder: descriptor plumbing and constructors. ----
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.discoveryengine.v1alpha.ChunkServiceProto
.internal_static_google_cloud_discoveryengine_v1alpha_ListChunksResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.discoveryengine.v1alpha.ChunkServiceProto
.internal_static_google_cloud_discoveryengine_v1alpha_ListChunksResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.discoveryengine.v1alpha.ListChunksResponse.class,
com.google.cloud.discoveryengine.v1alpha.ListChunksResponse.Builder.class);
}
// Construct using com.google.cloud.discoveryengine.v1alpha.ListChunksResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
// ---- Builder: clear/build lifecycle. bitField0_ bit 0 tracks the chunks
// list, bit 1 tracks next_page_token. ----
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (chunksBuilder_ == null) {
chunks_ = java.util.Collections.emptyList();
} else {
chunks_ = null;
chunksBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.discoveryengine.v1alpha.ChunkServiceProto
.internal_static_google_cloud_discoveryengine_v1alpha_ListChunksResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.discoveryengine.v1alpha.ListChunksResponse getDefaultInstanceForType() {
return com.google.cloud.discoveryengine.v1alpha.ListChunksResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.discoveryengine.v1alpha.ListChunksResponse build() {
com.google.cloud.discoveryengine.v1alpha.ListChunksResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.discoveryengine.v1alpha.ListChunksResponse buildPartial() {
com.google.cloud.discoveryengine.v1alpha.ListChunksResponse result =
new com.google.cloud.discoveryengine.v1alpha.ListChunksResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
// Freezes the chunks list into the result (or delegates to the field builder).
private void buildPartialRepeatedFields(
com.google.cloud.discoveryengine.v1alpha.ListChunksResponse result) {
if (chunksBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
chunks_ = java.util.Collections.unmodifiableList(chunks_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.chunks_ = chunks_;
} else {
result.chunks_ = chunksBuilder_.build();
}
}
// Copies scalar fields whose presence bit is set.
private void buildPartial0(com.google.cloud.discoveryengine.v1alpha.ListChunksResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
// ---- Builder: reflective field-access delegates (unchanged from superclass
// behavior; overridden only to narrow the return type to Builder). ----
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.discoveryengine.v1alpha.ListChunksResponse) {
return mergeFrom((com.google.cloud.discoveryengine.v1alpha.ListChunksResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
// Merge another ListChunksResponse: chunks are appended (or the other's list
// is shared when ours is empty), next_page_token is overwritten if non-empty.
public Builder mergeFrom(com.google.cloud.discoveryengine.v1alpha.ListChunksResponse other) {
if (other == com.google.cloud.discoveryengine.v1alpha.ListChunksResponse.getDefaultInstance())
return this;
if (chunksBuilder_ == null) {
if (!other.chunks_.isEmpty()) {
if (chunks_.isEmpty()) {
chunks_ = other.chunks_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureChunksIsMutable();
chunks_.addAll(other.chunks_);
}
onChanged();
}
} else {
if (!other.chunks_.isEmpty()) {
if (chunksBuilder_.isEmpty()) {
chunksBuilder_.dispose();
chunksBuilder_ = null;
chunks_ = other.chunks_;
bitField0_ = (bitField0_ & ~0x00000001);
chunksBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getChunksFieldBuilder()
: null;
} else {
chunksBuilder_.addAllMessages(other.chunks_);
}
}
}
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
// Wire-format parse loop: tag 10 = chunks (length-delimited message),
// tag 18 = next_page_token (UTF-8 string); anything else goes to unknown fields.
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.cloud.discoveryengine.v1alpha.Chunk m =
input.readMessage(
com.google.cloud.discoveryengine.v1alpha.Chunk.parser(), extensionRegistry);
if (chunksBuilder_ == null) {
ensureChunksIsMutable();
chunks_.add(m);
} else {
chunksBuilder_.addMessage(m);
}
break;
} // case 10
case 18:
{
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
// ---- Builder state for the chunks field. While chunksBuilder_ is null the
// builder works on the plain chunks_ list; once a nested builder is requested,
// chunksBuilder_ takes over and chunks_ is abandoned. ----
private int bitField0_;
private java.util.List<com.google.cloud.discoveryengine.v1alpha.Chunk> chunks_ =
java.util.Collections.emptyList();
// Copy-on-write: replaces a possibly shared/immutable list with a private ArrayList.
private void ensureChunksIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
chunks_ = new java.util.ArrayList<com.google.cloud.discoveryengine.v1alpha.Chunk>(chunks_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.discoveryengine.v1alpha.Chunk,
com.google.cloud.discoveryengine.v1alpha.Chunk.Builder,
com.google.cloud.discoveryengine.v1alpha.ChunkOrBuilder>
chunksBuilder_;
// ---- Builder accessors/mutators for `chunks`; each branches on whether the
// plain list or the RepeatedFieldBuilderV3 is in use. ----
/**
*
*
* <pre>
* The [Chunk][google.cloud.discoveryengine.v1alpha.Chunk]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1alpha.Chunk chunks = 1;</code>
*/
public java.util.List<com.google.cloud.discoveryengine.v1alpha.Chunk> getChunksList() {
if (chunksBuilder_ == null) {
return java.util.Collections.unmodifiableList(chunks_);
} else {
return chunksBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* The [Chunk][google.cloud.discoveryengine.v1alpha.Chunk]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1alpha.Chunk chunks = 1;</code>
*/
public int getChunksCount() {
if (chunksBuilder_ == null) {
return chunks_.size();
} else {
return chunksBuilder_.getCount();
}
}
/**
*
*
* <pre>
* The [Chunk][google.cloud.discoveryengine.v1alpha.Chunk]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1alpha.Chunk chunks = 1;</code>
*/
public com.google.cloud.discoveryengine.v1alpha.Chunk getChunks(int index) {
if (chunksBuilder_ == null) {
return chunks_.get(index);
} else {
return chunksBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* The [Chunk][google.cloud.discoveryengine.v1alpha.Chunk]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1alpha.Chunk chunks = 1;</code>
*/
public Builder setChunks(int index, com.google.cloud.discoveryengine.v1alpha.Chunk value) {
if (chunksBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureChunksIsMutable();
chunks_.set(index, value);
onChanged();
} else {
chunksBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The [Chunk][google.cloud.discoveryengine.v1alpha.Chunk]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1alpha.Chunk chunks = 1;</code>
*/
public Builder setChunks(
int index, com.google.cloud.discoveryengine.v1alpha.Chunk.Builder builderForValue) {
if (chunksBuilder_ == null) {
ensureChunksIsMutable();
chunks_.set(index, builderForValue.build());
onChanged();
} else {
chunksBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The [Chunk][google.cloud.discoveryengine.v1alpha.Chunk]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1alpha.Chunk chunks = 1;</code>
*/
public Builder addChunks(com.google.cloud.discoveryengine.v1alpha.Chunk value) {
if (chunksBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureChunksIsMutable();
chunks_.add(value);
onChanged();
} else {
chunksBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* The [Chunk][google.cloud.discoveryengine.v1alpha.Chunk]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1alpha.Chunk chunks = 1;</code>
*/
public Builder addChunks(int index, com.google.cloud.discoveryengine.v1alpha.Chunk value) {
if (chunksBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureChunksIsMutable();
chunks_.add(index, value);
onChanged();
} else {
chunksBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The [Chunk][google.cloud.discoveryengine.v1alpha.Chunk]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1alpha.Chunk chunks = 1;</code>
*/
public Builder addChunks(
com.google.cloud.discoveryengine.v1alpha.Chunk.Builder builderForValue) {
if (chunksBuilder_ == null) {
ensureChunksIsMutable();
chunks_.add(builderForValue.build());
onChanged();
} else {
chunksBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The [Chunk][google.cloud.discoveryengine.v1alpha.Chunk]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1alpha.Chunk chunks = 1;</code>
*/
public Builder addChunks(
int index, com.google.cloud.discoveryengine.v1alpha.Chunk.Builder builderForValue) {
if (chunksBuilder_ == null) {
ensureChunksIsMutable();
chunks_.add(index, builderForValue.build());
onChanged();
} else {
chunksBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The [Chunk][google.cloud.discoveryengine.v1alpha.Chunk]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1alpha.Chunk chunks = 1;</code>
*/
public Builder addAllChunks(
java.lang.Iterable<? extends com.google.cloud.discoveryengine.v1alpha.Chunk> values) {
if (chunksBuilder_ == null) {
ensureChunksIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, chunks_);
onChanged();
} else {
chunksBuilder_.addAllMessages(values);
}
return this;
}
// ---- Builder accessors for `chunks` continued: clear/remove plus nested
// Chunk.Builder access (requesting a nested builder switches the field into
// RepeatedFieldBuilderV3 mode via getChunksFieldBuilder()). ----
/**
*
*
* <pre>
* The [Chunk][google.cloud.discoveryengine.v1alpha.Chunk]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1alpha.Chunk chunks = 1;</code>
*/
public Builder clearChunks() {
if (chunksBuilder_ == null) {
chunks_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
chunksBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* The [Chunk][google.cloud.discoveryengine.v1alpha.Chunk]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1alpha.Chunk chunks = 1;</code>
*/
public Builder removeChunks(int index) {
if (chunksBuilder_ == null) {
ensureChunksIsMutable();
chunks_.remove(index);
onChanged();
} else {
chunksBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* The [Chunk][google.cloud.discoveryengine.v1alpha.Chunk]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1alpha.Chunk chunks = 1;</code>
*/
public com.google.cloud.discoveryengine.v1alpha.Chunk.Builder getChunksBuilder(int index) {
return getChunksFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* The [Chunk][google.cloud.discoveryengine.v1alpha.Chunk]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1alpha.Chunk chunks = 1;</code>
*/
public com.google.cloud.discoveryengine.v1alpha.ChunkOrBuilder getChunksOrBuilder(int index) {
if (chunksBuilder_ == null) {
return chunks_.get(index);
} else {
return chunksBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* The [Chunk][google.cloud.discoveryengine.v1alpha.Chunk]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1alpha.Chunk chunks = 1;</code>
*/
public java.util.List<? extends com.google.cloud.discoveryengine.v1alpha.ChunkOrBuilder>
getChunksOrBuilderList() {
if (chunksBuilder_ != null) {
return chunksBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(chunks_);
}
}
/**
*
*
* <pre>
* The [Chunk][google.cloud.discoveryengine.v1alpha.Chunk]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1alpha.Chunk chunks = 1;</code>
*/
public com.google.cloud.discoveryengine.v1alpha.Chunk.Builder addChunksBuilder() {
return getChunksFieldBuilder()
.addBuilder(com.google.cloud.discoveryengine.v1alpha.Chunk.getDefaultInstance());
}
/**
*
*
* <pre>
* The [Chunk][google.cloud.discoveryengine.v1alpha.Chunk]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1alpha.Chunk chunks = 1;</code>
*/
public com.google.cloud.discoveryengine.v1alpha.Chunk.Builder addChunksBuilder(int index) {
return getChunksFieldBuilder()
.addBuilder(index, com.google.cloud.discoveryengine.v1alpha.Chunk.getDefaultInstance());
}
/**
*
*
* <pre>
* The [Chunk][google.cloud.discoveryengine.v1alpha.Chunk]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1alpha.Chunk chunks = 1;</code>
*/
public java.util.List<com.google.cloud.discoveryengine.v1alpha.Chunk.Builder>
getChunksBuilderList() {
return getChunksFieldBuilder().getBuilderList();
}
// Lazily creates the field builder and hands ownership of chunks_ to it.
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.discoveryengine.v1alpha.Chunk,
com.google.cloud.discoveryengine.v1alpha.Chunk.Builder,
com.google.cloud.discoveryengine.v1alpha.ChunkOrBuilder>
getChunksFieldBuilder() {
if (chunksBuilder_ == null) {
chunksBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.discoveryengine.v1alpha.Chunk,
com.google.cloud.discoveryengine.v1alpha.Chunk.Builder,
com.google.cloud.discoveryengine.v1alpha.ChunkOrBuilder>(
chunks_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
chunks_ = null;
}
return chunksBuilder_;
}
// Holds either a java.lang.String or a com.google.protobuf.ByteString;
// the accessors below cache conversions between the two forms.
private java.lang.Object nextPageToken_ = "";
/**
 *
 *
 * <pre>
 * A token that can be sent as
 * [ListChunksRequest.page_token][google.cloud.discoveryengine.v1alpha.ListChunksRequest.page_token]
 * to retrieve the next page. If this field is omitted, there are no
 * subsequent pages.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @return The nextPageToken.
 */
public java.lang.String getNextPageToken() {
  java.lang.Object ref = nextPageToken_;
  if (!(ref instanceof java.lang.String)) {
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    // Cache the decoded String so later calls skip the UTF-8 decode.
    nextPageToken_ = s;
    return s;
  } else {
    return (java.lang.String) ref;
  }
}
/**
 *
 *
 * <pre>
 * A token that can be sent as
 * [ListChunksRequest.page_token][google.cloud.discoveryengine.v1alpha.ListChunksRequest.page_token]
 * to retrieve the next page. If this field is omitted, there are no
 * subsequent pages.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @return The bytes for nextPageToken.
 */
public com.google.protobuf.ByteString getNextPageTokenBytes() {
  java.lang.Object ref = nextPageToken_;
  if (ref instanceof String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    // Cache the encoded ByteString so later calls skip the UTF-8 encode.
    nextPageToken_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
/**
 *
 *
 * <pre>
 * A token that can be sent as
 * [ListChunksRequest.page_token][google.cloud.discoveryengine.v1alpha.ListChunksRequest.page_token]
 * to retrieve the next page. If this field is omitted, there are no
 * subsequent pages.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @param value The nextPageToken to set.
 * @return This builder for chaining.
 */
public Builder setNextPageToken(java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  nextPageToken_ = value;
  // Bit 0x00000002 marks next_page_token as explicitly set for buildPartial().
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}
/**
 *
 *
 * <pre>
 * A token that can be sent as
 * [ListChunksRequest.page_token][google.cloud.discoveryengine.v1alpha.ListChunksRequest.page_token]
 * to retrieve the next page. If this field is omitted, there are no
 * subsequent pages.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @return This builder for chaining.
 */
public Builder clearNextPageToken() {
  // Reset to the default instance's value (empty string) and clear the set-bit.
  nextPageToken_ = getDefaultInstance().getNextPageToken();
  bitField0_ = (bitField0_ & ~0x00000002);
  onChanged();
  return this;
}
/**
 *
 *
 * <pre>
 * A token that can be sent as
 * [ListChunksRequest.page_token][google.cloud.discoveryengine.v1alpha.ListChunksRequest.page_token]
 * to retrieve the next page. If this field is omitted, there are no
 * subsequent pages.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @param value The bytes for nextPageToken to set.
 * @return This builder for chaining.
 */
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  // Proto3 strings must be valid UTF-8; reject malformed input early.
  checkByteStringIsUtf8(value);
  nextPageToken_ = value;
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.setUnknownFields(unknownFields);
}

@java.lang.Override
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.mergeUnknownFields(unknownFields);
}

// @@protoc_insertion_point(builder_scope:google.cloud.discoveryengine.v1alpha.ListChunksResponse)
}

// @@protoc_insertion_point(class_scope:google.cloud.discoveryengine.v1alpha.ListChunksResponse)
private static final com.google.cloud.discoveryengine.v1alpha.ListChunksResponse DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.cloud.discoveryengine.v1alpha.ListChunksResponse();
}

/** @return The shared, immutable default (all-fields-unset) instance. */
public static com.google.cloud.discoveryengine.v1alpha.ListChunksResponse getDefaultInstance() {
  return DEFAULT_INSTANCE;
}

// Parses by merging into a fresh Builder. On any failure, the partially-built
// message is preserved on the thrown exception via setUnfinishedMessage.
private static final com.google.protobuf.Parser<ListChunksResponse> PARSER =
    new com.google.protobuf.AbstractParser<ListChunksResponse>() {
      @java.lang.Override
      public ListChunksResponse parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          // Wrap plain I/O errors so callers see a single exception type.
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

/** @return The parser for this message type. */
public static com.google.protobuf.Parser<ListChunksResponse> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<ListChunksResponse> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.cloud.discoveryengine.v1alpha.ListChunksResponse getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
/*-
* Copyright (C) 2002, 2025, Oracle and/or its affiliates. All rights reserved.
*
* This file was distributed by Oracle as part of a version of Oracle NoSQL
* Database made available at:
*
* http://www.oracle.com/technetwork/database/database-technologies/nosqldb/downloads/index.html
*
* Please see the LICENSE file included in the top-level directory of the
* appropriate version of Oracle NoSQL Database for a copy of the license and
* additional information.
*/
package com.sleepycat.je.tree;
import static com.sleepycat.je.utilint.VLSN.NULL_VLSN;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import com.sleepycat.je.beforeimage.BeforeImageContext;
import com.sleepycat.je.DatabaseEntry;
import com.sleepycat.je.DatabaseException;
import com.sleepycat.je.EnvironmentFailureException;
import com.sleepycat.je.dbi.DatabaseImpl;
import com.sleepycat.je.dbi.EnvironmentFailureReason;
import com.sleepycat.je.dbi.EnvironmentImpl;
import com.sleepycat.je.dbi.INList;
import com.sleepycat.je.dbi.MemoryBudget;
import com.sleepycat.je.log.LogEntryType;
import com.sleepycat.je.log.LogItem;
import com.sleepycat.je.log.LogParams;
import com.sleepycat.je.log.LogUtils;
import com.sleepycat.je.log.Loggable;
import com.sleepycat.je.log.Provisional;
import com.sleepycat.je.log.ReplicationContext;
import com.sleepycat.je.log.VersionedWriteLoggable;
import com.sleepycat.je.log.entry.LNLogEntry;
import com.sleepycat.je.txn.LockGrantType;
import com.sleepycat.je.txn.LockResult;
import com.sleepycat.je.txn.LockType;
import com.sleepycat.je.txn.Locker;
import com.sleepycat.je.txn.Txn;
import com.sleepycat.je.txn.WriteLockInfo;
import com.sleepycat.je.utilint.DbLsn;
import com.sleepycat.je.utilint.SizeofMarker;
import com.sleepycat.je.utilint.VLSN;
/**
* An LN represents a Leaf Node in the JE tree.
*/
public class LN extends Node implements VersionedWriteLoggable {
    /* Tags used by dumpString/dumpLog to delimit an LN in debug output. */
    private static final String BEGIN_TAG = "<ln>";
    private static final String END_TAG = "</ln>";

    /**
     * The log version of the most recent format change for this loggable.
     *
     * @see #getLastFormatChange
     */
    private static final int LAST_FORMAT_CHANGE = 8;

    /* The record data; null is the representation of a deleted LN. */
    private byte[] data;

    /*
     * Flags: bit fields
     */

    /*
     * Set when the LN was faulted in from disk and has not yet been accessed
     * with a CacheMode other than UNCHANGED (see initialize/setFetchedCold).
     */
    private static final int FETCHED_COLD_BIT = 0x80000000;

    private int flags; // not persistent

    /**
     * A cached version of the VLSN and modification time that are stored in
     * the LNLogEntry. These are used to cache these values in the Btree (BIN)
     * when the LN is resident.
     */
    private long vlsnSequence = NULL_VLSN;
    private long modificationTime = 0;
    /**
     * Create an empty LN, to be filled in from the log.
     */
    public LN() {
        this.data = null;
    }

    /**
     * Create a new LN from a byte array. Pass a null byte array to create a
     * deleted LN.
     *
     * Does NOT copy the byte array, so after calling this method the array is
     * "owned" by the Btree and should not be modified.
     *
     * The envImpl param may be used in the future to create LN subclasses.
     *
     * @param dataParam the record data; not copied, and null means deleted.
     * @return a new LN that owns the given array.
     */
    public static LN makeLN(
        @SuppressWarnings("unused")
        EnvironmentImpl envImpl,
        byte[] dataParam) {
        return new LN(dataParam);
    }

    /**
     * Create a new LN from a DatabaseEntry. Makes a copy of the byte array.
     *
     * The envImpl param may be used in the future to create LN subclasses.
     *
     * @param dbt the entry whose data is copied; partial entries are honored.
     * @return a new LN holding a copy of the entry's data.
     */
    public static LN makeLN(
        @SuppressWarnings("unused")
        EnvironmentImpl envImpl,
        DatabaseEntry dbt) {
        return new LN(dbt);
    }
    /**
     * Does NOT copy the byte array, so after calling this method the array is
     * "owned" by the Btree and should not be modified.
     */
    LN(final byte[] data) {
        if (data == null) {
            /* Null data is the representation of a deleted LN. */
            this.data = null;
        } else if (data.length == 0) {
            /* Share the canonical zero-length array instead of the caller's. */
            this.data = LogUtils.ZERO_LENGTH_BYTE_ARRAY;
        } else {
            this.data = data;
        }
    }

    /**
     * Makes a copy of the byte array.
     */
    LN(DatabaseEntry dbt) {
        byte[] dat = dbt.getData();
        if (dat == null) {
            data = null;
        } else if (dbt.getPartial()) {
            /*
             * Size the array to hold the partial region at its partial
             * offset; bytes before the region are left zero-filled.
             */
            init(dat,
                 dbt.getOffset(),
                 dbt.getPartialOffset() + dbt.getSize(),
                 dbt.getPartialOffset(),
                 dbt.getSize());
        } else {
            init(dat, dbt.getOffset(), dbt.getSize());
        }
    }

    /** For Sizeof. */
    public LN(@SuppressWarnings("unused") SizeofMarker marker,
              DatabaseEntry dbt) {
        this(dbt);
    }
    /**
     * Allocates this.data with length len and copies dlen bytes from
     * data[off] into this.data[doff]. Bytes outside the copied region
     * remain zero (used when constructing from a partial entry).
     */
    private void init(byte[] data, int off, int len, int doff, int dlen) {
        if (len == 0) {
            this.data = LogUtils.ZERO_LENGTH_BYTE_ARRAY;
        } else {
            this.data = new byte[len];
            System.arraycopy(data, off, this.data, doff, dlen);
        }
    }

    /** Copies len bytes starting at data[off] into a newly allocated array. */
    private void init(byte[] data, int off, int len) {
        init(data, off, len, 0, len);
    }
    /** Returns the record data, or null if this LN is deleted. */
    public byte[] getData() {
        return data;
    }

    /** Returns the offset of the data within the backing array (always 0 here). */
    public int getDataOffset() {
        assert data != null : "Not allowed for deleted LN";
        return 0;
    }

    /** Returns the length of the record data. */
    public int getDataSize() {
        assert data != null : "Not allowed for deleted LN";
        return data.length;
    }

    /** A null data array is the representation of a deleted LN. */
    public boolean isDeleted() {
        return (data == null);
    }

    @Override
    public boolean isLN() {
        return true;
    }

    /** Marks this LN deleted by dropping its data. */
    void makeDeleted() {
        data = null;
    }

    /** Returns whether the fetched-cold flag is set; see initialize(). */
    public boolean getFetchedCold() {
        return ((flags & FETCHED_COLD_BIT) != 0);
    }

    /** Sets or clears the fetched-cold flag. */
    public void setFetchedCold(boolean val) {
        if (val) {
            flags |= FETCHED_COLD_BIT;
        } else {
            flags &= ~FETCHED_COLD_BIT;
        }
    }
    /**
     * Initialize LN after fetching from disk.
     *
     * @param db the owning database; unused here but available to subclasses.
     */
    public void initialize(DatabaseImpl db) {
        /*
         * This flag is initially true for a fetched LN, and will be set to
         * false if the LN is accessed with any CacheMode other than UNCHANGED.
         */
        setFetchedCold(true);
    }

    /**
     * Returns the cached VLSN that was copied from the LNLogEntry, or
     * {@link VLSN#NULL_VLSN} if the log entry has no VLSN.
     * @see #vlsnSequence
     */
    public long getVLSNSequence() {
        return vlsnSequence;
    }

    /**
     * Caches a copy of the VLSN from the LNLogEntry after a read or write.
     * @param seq the VLSN sequence to cache.
     * @see #vlsnSequence
     */
    public void setVLSNSequence(long seq) {
        vlsnSequence = seq;
    }

    /**
     * Returns the cached modification time that was copied from the
     * LNLogEntry.
     * @see #modificationTime
     */
    public long getModificationTime() {
        return modificationTime;
    }

    /**
     * Caches a copy of the modification time from the LNLogEntry after a read
     * or write.
     * @param time the modification time to cache.
     * @see #modificationTime
     */
    public void setModificationTime(long time) {
        modificationTime = time;
    }
    /*
     * If you get to an LN, this subtree isn't valid for delete. True, the LN
     * may have been deleted, but you can't be sure without taking a lock, and
     * the validate-subtree-for-delete process assumes that bin compressing
     * has happened and there are no committed, deleted LNs hanging off the
     * BIN.
     */
    @Override
    boolean isValidForDelete() {
        return false;
    }

    /**
     * Returns true by default, but is overridden by MapLN to prevent eviction
     * of open databases. This method is meant to be a guaranteed check and is
     * used after a BIN has been selected for LN stripping but before actually
     * stripping an LN. [#13415]
     * @throws DatabaseException from subclasses.
     */
    boolean isEvictable()
        throws DatabaseException {
        return true;
    }

    /** Marks this LN as deleted. */
    public void delete() {
        makeDeleted();
    }

    /** Replaces the data; does NOT copy the given array. */
    public void modify(byte[] newData) {
        data = newData;
    }

    /**
     * Sets data to empty and returns old data. Called when converting an old
     * format LN in a duplicates DB.
     *
     * @return the previous data array (may be null for a deleted LN).
     */
    public byte[] setEmpty() {
        final byte[] retVal = data;
        data = Key.EMPTY_KEY;
        return retVal;
    }
    /**
     * Add yourself to the in memory list if you're a type of node that should
     * belong.
     */
    @Override
    void rebuildINList(INList inList) {
        /*
         * Don't add, LNs don't belong on the list.
         */
    }

    /**
     * Compute the approximate size of this node in memory for evictor
     * invocation purposes.
     *
     * @return the fixed LN overhead, plus the budgeted size of the data
     * array when this LN is not deleted.
     */
    @Override
    public long getMemorySizeIncludedByParent() {
        int size = MemoryBudget.LN_OVERHEAD;
        if (data != null) {
            size += MemoryBudget.byteArraySize(getDataSize());
        }
        return size;
    }

    /*
     * Dumping
     */

    /** Returns the opening tag used when dumping this node. */
    public String beginTag() {
        return BEGIN_TAG;
    }

    /** Returns the closing tag used when dumping this node. */
    public String endTag() {
        return END_TAG;
    }
@Override
public String dumpString(int nSpaces, boolean dumpTags) {
StringBuilder self = new StringBuilder();
if (dumpTags) {
self.append(TreeUtils.indent(nSpaces));
self.append(beginTag());
self.append('\n');
}
self.append(super.dumpString(nSpaces + 2, true));
self.append('\n');
if (data != null) {
self.append(TreeUtils.indent(nSpaces+2));
self.append("<data>");
self.append(Key.DUMP_TYPE.dumpByteArray(
data, getDataOffset(), getDataSize()));
self.append("</data>");
self.append('\n');
}
if (dumpTags) {
self.append(TreeUtils.indent(nSpaces));
self.append(endTag());
}
return self.toString();
}
    /*
     * Logging Support
     */

    /*
     * Lock the new LSN immediately after logging, with the BIN latched.
     * Lock non-blocking, since no contention is possible on the new LSN.
     * If the locker is transactional, a new WriteLockInfo is created for
     * the new LSN and stored in the locker. lockResult points to that
     * WriteLockInfo. Since this new WriteLockInfo and the WriteLockInfo
     * given as input to this method refer to the same logical record,
     * the info from the given WriteLockInfo is copied to the new one.
     */
    private void getPostLogLock(
        final Locker locker,
        final LogItem item,
        final DatabaseImpl dbImpl,
        final WriteLockInfo writeLockInfo
    ) {
        final long newLsn = item.lsn;

        /* Null cursor param is OK because this is not a read lock. */
        final LockResult lockResult = locker.postLogNonBlockingLock(
            newLsn, LockType.WRITE, false /*jumpAheadOfWaiters*/, dbImpl,
            null);

        /* The grant cannot be denied: the LSN is brand new, so there can
         * be no contention on it. */
        assert lockResult.getLockGrant() != LockGrantType.DENIED :
            DbLsn.getNoFormatString(newLsn);

        lockResult.copyWriteLockInfo(writeLockInfo);
    }
/**
* Generate and write to the log a logrec describing an operation O that
* is being performed on a record R with key K. O may be an insertion,
* update, deletion, migration.
*
* Let T be the locker performing O. T is null in case of DW eviction/ckpt.
* Otherwise, T holds a lock on R and it will keep that lock until it
* terminates. In case of a CUD op, the lock is an exclusive one; in
* case of LN migration, it's a shared one (and T is non-transactional).
*
* - Let Rc be the current version of R (before O). The absence of R from
* the DB is considered as a special "deleted" version. Rc may be the
* deleted version.
* - If T is a Txn, let Ra be the version of R before T write-locked R. Ra
* may be the deleted version. Ra and Rc will be the same if O is the
* very 1st op on R by T.
* - Let Rn be R's new version (after O). Rc and Rn will be the same if O
* is migration or DW eviction/ckpt.
*
* - Let Ln be the LSN of the logrec that will be generated here to
* describe O.
* - Let Lc be the current LSN value in R's slot, or NULL if no such slot
* exists currently. If an R slot exists, then Lc points to Rc, or may be
* NULL if Rc is the deleted version.
* - If T is a Txn, let La be the LSN value in R's slot at the time T
* write-locked R, or NULL if no such slot existed at that time.
*
* @param locker The locker T. If non-null, a write lock will be acquired
* by T on Ln's LSN.
*
* WARNING: Be sure to pass null for the locker param if the new LSN should
* not be locked.
*
* @param writeLockInfo It is non-null if and only if T is a Txn. It
* contains info that must be included in Ln to make it undoable if T
* aborts. Specifically, it contains:
*
* - abortKD : True if Ra is the deleted version; false otherwise.
* - abortLSN : The La LSN as defined above.
* - abortKey : The key of Ra, if Ra was embedded in the parent BIN and
* the containing DB allows key updates.
* - abortData : The data of Ra, if Ra was embedded in the parent BIN.
*
* When the new LSN is write-locked, a new WriteLockInfo is created and
* the above info is copied into it. Normally this parameter should be
* obtained from the prepareForInsert or prepareForUpdate method of
* CursorImpl.LockStanding.
*
* @param newEmbeddedLN Whether Rn will be embedded into the parent BIN.
* If true, Ln will be counted as an "immediately obsolete" logrec.
*
* @param newKey Rn's key. Note: Rn's data is not passed as a parameter to
* this method because it is stored in this LN. Rn (key and data) will be
* stored in Ln. Rn's key will also be stored in the parent BIN, and if
* newEmbeddedLN is true, Rn's data too will be stored there.
*
* @param newExpiration the new expiration time in days or hours.
*
* @param newExpirationInHours whether the new expiration time is in hours.
*
* @param currEmbeddedLN Whether Rc's data is embedded into the parent
* BIN. If true, Lc has already been counted obsolete.
*
* @param currLsn The Lc LSN as defined above. Is given as a param to this
* method to count the associated logrec as obsolete (which must done under
* the LWL), if it has not been counted already.
*
* @param currSize The size of Lc (needed for obsolete counting).
*
* @param isInsertion True if the operation is an insertion (including
* slot reuse). False otherwise.
*/
    public LogItem log(
        final EnvironmentImpl envImpl,
        final DatabaseImpl dbImpl,
        final Locker locker,
        final WriteLockInfo writeLockInfo,
        final boolean newEmbeddedLN,
        final byte[] newKey,
        final int newExpiration,
        final boolean newExpirationInHours,
        final long newModificationTime,
        final boolean newTombstone,
        final boolean newBlindDeletion,
        final boolean currEmbeddedLN,
        final long currLsn,
        final int currSize,
        final boolean isInsertion,
        final boolean backgroundIO,
        ReplicationContext repContext,
        final BeforeImageContext bImgCtx)
        throws DatabaseException {

        /* Internal DBs never embed LNs; dup DBs never carry mod times. */
        assert !(dbImpl.getDbType().isInternal() && newEmbeddedLN);
        assert !(dbImpl.getSortedDuplicates() && newModificationTime != 0);

        if (envImpl.isReadOnly()) {
            /* Returning a NULL_LSN will not allow locking. */
            throw EnvironmentFailureException.unexpectedState(
                "Cannot log LNs in read-only env.");
        }

        /*
         * Check that a replicated txn is used for writing to a replicated DB,
         * and a non-replicated locker is used for writing to a
         * non-replicated DB. This is critical for avoiding corruption when HA
         * failover occurs [#23234] [#23330].
         *
         * Two cases are exempt from this rule:
         *
         * - The locker is null only when performing internal logging (not a
         *   user operation), such as cleaner migration. This is always
         *   non-transactional and non-replicated, so we can skip this check.
         *   Note that the cleaner may migrate an LN in a replicated DB, but
         *   this is not part of the rep stream.
         *
         * - Some DBs contain a mixture of replicated and non-replicated
         *   records. For example, only NameLNs that identify replicated DBs
         *   are replicated, not all NameLNs in the naming DB, so the naming
         *   DB is exempt.
         *
         * This guard should never fire because of two checks made prior to
         * logging:
         *
         * - When a user txn in a replicated environment is not configured for
         *   local-write and a write operation is attempted (or when the
         *   opposite is true), the Cursor class will throw
         *   UnsupportedOperationException. See Locker.isLocalWrite.
         *
         * - On a replica, writes to replicated DBs are disallowed even when
         *   local-write is false. This is enforced by the ReadonlyTxn class
         *   which throws ReplicaWriteException in this case.
         */
        final boolean isMixedRepDB = dbImpl.getDbType().isMixedReplication() ||
            dbImpl.getDbType().isMixedTransactional();

        if (!isMixedRepDB &&
            envImpl.isReplicated() &&
            locker != null &&
            dbImpl.isReplicated() != locker.isReplicated()) {

            throw EnvironmentFailureException.unexpectedState(
                (locker.isReplicated() ?
                 "Rep txn used to write to non-rep DB" :
                 "Non-rep txn used to write to rep DB") +
                ", class = " + locker.getClass().getName() +
                ", txnId = " + locker.getId() +
                ", dbName = " + dbImpl.getName());
        }

        /*
         * As an additional safeguard, check that a replicated txn is used when
         * the operation is part of the rep stream, and that the inverse is
         * also true. Mixed rep DBs are exempt for the same reason as above.
         */
        if (!isMixedRepDB) {

            boolean isRepLocker = (locker != null) && locker.isReplicated();

            if (repContext.inReplicationStream() != isRepLocker) {
                throw EnvironmentFailureException.unexpectedState(
                    (isRepLocker ?
                     "Rep txn used to write outside of rep stream" :
                     "Non-rep txn used to write in rep stream") +
                    ((locker != null) ?
                     (", class = " + locker.getClass().getName() +
                      ", txnId = " + locker.getId()) :
                     ", null locker") +
                    ", dbName = " + dbImpl.getName());
            }
        }

        LogEntryType entryType;
        Txn txn = null;

        /* Abort (undo) info, populated only for transactional lockers. */
        long abortLsn = DbLsn.NULL_LSN;
        boolean abortKD = false;
        byte[] abortKey = null;
        byte[] abortData = null;
        long abortVLSN = NULL_VLSN;
        int abortExpiration = 0;
        boolean abortExpirationInHours = false;
        long abortModificationTime = 0;
        boolean abortTombstone = false;

        LogParams params = new LogParams();

        if (locker != null && locker.isTransactional()) {

            entryType = getLogType(isInsertion, true, dbImpl);

            txn = locker.getTxnLocker();
            assert(txn != null);

            /* Copy undo info from the write lock for possible rollback. */
            abortLsn = writeLockInfo.getAbortLsn();
            abortKD = writeLockInfo.getAbortKnownDeleted();
            abortKey = writeLockInfo.getAbortKey();
            abortData = writeLockInfo.getAbortData();
            abortVLSN = writeLockInfo.getAbortVLSN();
            abortExpiration = writeLockInfo.getAbortExpiration();
            abortExpirationInHours = writeLockInfo.isAbortExpirationInHours();
            abortModificationTime = writeLockInfo.getAbortModificationTime();
            abortTombstone = writeLockInfo.getAbortTombstone();
            params.obsoleteDupsAllowed = locker.isRolledBack();

        } else {
            entryType = getLogType(isInsertion, false, dbImpl);
        }

        /*
         * Determine whether the prior version Rc was counted earlier as an
         * "immediately obsolete" logrec. This includes the cases where the
         * DB is a dups DB, or the current op is an insertion (which implies
         * Rc is a deletion and as such has been counted already) or Rc is
         * embedded.
         */
        boolean currImmediatelyObsolete =
            dbImpl.isLNImmediatelyObsolete() ||
            isInsertion ||
            currEmbeddedLN;

        int priorSize = currImmediatelyObsolete ? 0 : currSize;
        long priorLsn = (priorSize == 0) ? DbLsn.NULL_LSN : currLsn;

        /*
         * If currImmediatelyObsolete, pass zero/NULL_LSN for the prior
         * size/lsn. Recovery uses these values to count prior versions
         * obsolete, but only when they are not immediately obsolete.
         */
        params.entry = createLogEntry(
            entryType, dbImpl, txn,
            abortLsn, abortKD, abortKey, abortData, abortVLSN,
            abortExpiration, abortExpirationInHours,
            abortModificationTime, abortTombstone,
            newKey, newEmbeddedLN, newExpiration, newExpirationInHours,
            newModificationTime, newTombstone, newBlindDeletion,
            priorSize, priorLsn, repContext, bImgCtx);

        /* LNs are never provisional. */
        params.provisional = Provisional.NO;

        /*
         * Decide whether to count the current record version as obsolete
         * during logging. Rc should not be counted as obsolete if:
         * (a) Rc == Ra; Ra (i.e. abortLsn) will be counted obsolete during
         * commit, or
         * (b) Rc was counted earlier as an "immediately obsolete" logrec.
         */
        if (currLsn != abortLsn && !currImmediatelyObsolete) {
            params.oldLsn = currLsn;
            params.oldSize = currSize;
        }

        params.repContext = repContext;
        params.backgroundIO = backgroundIO;
        params.nodeDb = dbImpl;

        /* Save obsolete size information to be used during commit. */
        if (txn != null && currLsn == abortLsn) {
            writeLockInfo.setAbortLogSize(currSize);
        }

        /*
         * TODO: We could set LogParams.immutableLogEntry to true when the LN
         * will be evicted immediately after logging (it is not cached) and
         * is replicated and immutable, e.g., index records and deletions.
         * This would reduce memory allocation since a cached buffer and its
         * associated queue entry would not be needed.
         */

        LogItem item;
        try {
            if (txn != null) {

                /*
                 * Writing an LN_TX entry requires looking at the Txn's
                 * lastLoggedLsn. The Txn may be used by multiple threads so
                 * ensure that the view we get is consistent. [#17204]
                 * The lock on the log has to be acquired in the synchronized
                 * because other internal threads may try to access the
                 * lastLoggedLsn and the lock on it, such as
                 * MasterTxn.convertToReplayTxnAndClose.
                 */
                synchronized (txn) {
                    item = envImpl.getLogManager().log(params);
                    getPostLogLock(locker, item, dbImpl, writeLockInfo);
                }
            } else {
                item = envImpl.getLogManager().log(params);
            }
        } catch (Throwable e) {
            /*
             * If any exception occurs while logging an LN, ensure that the
             * environment is invalidated. This will also ensure that the txn
             * cannot be committed.
             */
            if (envImpl.isValid()) {
                throw new EnvironmentFailureException(
                    envImpl, EnvironmentFailureReason.LOG_INCOMPLETE,
                    "LN could not be logged", e);
            } else {
                throw e;
            }
        }

        /* Non-txnal lockers lock the new LSN here, outside any txn sync. */
        if (txn == null && locker != null) {
            getPostLogLock(locker, item, dbImpl, writeLockInfo);
        }

        /* In a dup DB, do not expect embedded LNs or non-empty data. */
        if (dbImpl.getSortedDuplicates() &&
            (newEmbeddedLN || (data != null && getDataSize() > 0))) {

            throw EnvironmentFailureException.unexpectedState(
                envImpl,
                "[#25288] emb=" + newEmbeddedLN +
                " key=" + Key.getNoFormatString(newKey) +
                " data=" + Key.getNoFormatString(
                    data, getDataOffset(), getDataSize()) +
                " lsn=" + DbLsn.getNoFormatString(currLsn));
        }

        return item;
    }
    /**
     * Convenience overload that logs without a before-image context
     * (bImgCtx == null); see the full {@code log} method for the semantics
     * of the remaining parameters.
     */
    public LogItem log(
        final EnvironmentImpl envImpl,
        final DatabaseImpl dbImpl,
        final Locker locker,
        final WriteLockInfo writeLockInfo,
        final boolean newEmbeddedLN,
        final byte[] newKey,
        final int newExpiration,
        final boolean newExpirationInHours,
        final long newModificationTime,
        final boolean newTombstone,
        final boolean newBlindDeletion,
        final boolean currEmbeddedLN,
        final long currLsn,
        final int currSize,
        final boolean isInsertion,
        final boolean backgroundIO,
        ReplicationContext repContext)
        throws DatabaseException {

        return log(envImpl, dbImpl, locker, writeLockInfo, newEmbeddedLN,
                   newKey, newExpiration, newExpirationInHours,
                   newModificationTime, newTombstone, newBlindDeletion,
                   currEmbeddedLN, currLsn, currSize, isInsertion,
                   backgroundIO, repContext, null);
    }
    /*
     * Each LN knows what kind of log entry it uses to log itself. Overridden
     * by subclasses.
     */
    protected LNLogEntry<?> createLogEntry(
        LogEntryType entryType,
        DatabaseImpl dbImpl,
        Txn txn,
        long abortLsn,
        boolean abortKD,
        byte[] abortKey,
        byte[] abortData,
        long abortVLSN,
        int abortExpiration,
        boolean abortExpirationInHours,
        long abortModificationTime,
        boolean abortTombstone,
        byte[] newKey,
        boolean newEmbeddedLN,
        int newExpiration,
        boolean newExpirationInHours,
        long newModificationTime,
        boolean newTombstone,
        boolean newBlindDeletion,
        int priorSize,
        long priorLsn,
        ReplicationContext repContext,
        BeforeImageContext bImgCtx) {

        /* A non-null before-image context requests before-image capture. */
        return new LNLogEntry<>(
            entryType, dbImpl.getId(), txn,
            abortLsn, abortKD, abortKey, abortData, abortVLSN,
            abortExpiration, abortExpirationInHours, abortModificationTime,
            abortTombstone, newKey, this, newEmbeddedLN,
            newExpiration, newExpirationInHours,
            newModificationTime, newTombstone, newBlindDeletion,
            priorSize, priorLsn, (bImgCtx != null));
    }
    /**
     * @see Node#incFetchStats
     */
    @Override
    void incFetchStats(EnvironmentImpl envImpl, boolean isMiss) {
        envImpl.getEvictor().incLNFetchStats(isMiss);
    }

    /**
     * @see Node#getGenericLogType
     *
     * @return the non-transactional insertion log type for this LN class.
     */
    @Override
    public LogEntryType getGenericLogType() {
        return getLogType(true, false, null);
    }
protected LogEntryType getLogType(
boolean isInsert,
boolean isTransactional,
DatabaseImpl db) {
if (db != null) {
LogEntryType type = db.getDbType().getLogType(isTransactional);
if (type != null) {
return type;
}
}
if (isDeleted()) {
assert !isInsert;
return isTransactional ?
LogEntryType.LOG_DEL_LN_TRANSACTIONAL :
LogEntryType.LOG_DEL_LN;
}
if (isInsert) {
return isTransactional ?
LogEntryType.LOG_INS_LN_TRANSACTIONAL :
LogEntryType.LOG_INS_LN;
}
return isTransactional ?
LogEntryType.LOG_UPD_LN_TRANSACTIONAL :
LogEntryType.LOG_UPD_LN;
}
    /**
     * @see VersionedWriteLoggable#getLastFormatChange
     */
    @Override
    public int getLastFormatChange() {
        return LAST_FORMAT_CHANGE;
    }

    /** LNs embed no other versioned loggables. */
    @Override
    public Collection<VersionedWriteLoggable> getEmbeddedLoggables() {
        return Collections.emptyList();
    }

    /** Log size for the current log version, non-replication format. */
    @Override
    public int getLogSize() {
        return getLogSize(LogEntryType.LOG_VERSION, false /*forReplication*/);
    }

    /** Writes using the current log version, non-replication format. */
    @Override
    public void writeToLog(final ByteBuffer logBuffer) {
        writeToLog(
            logBuffer, LogEntryType.LOG_VERSION, false /*forReplication*/);
    }

    /* A deleted LN is marshaled as just a -1 length marker. */
    @Override
    public int getLogSize(final int logVersion, final boolean forReplication) {
        return calcLogSize(isDeleted() ? -1 : getDataSize());
    }
/**
* Calculates log size based on given dataLen, which is negative to
* calculate the size of a deleted LN.
*/
private int calcLogSize(int dataLen) {
int size = 0;
if (dataLen < 0) {
size += LogUtils.getPackedIntLogSize(-1);
} else {
size += LogUtils.getPackedIntLogSize(dataLen);
size += dataLen;
}
return size;
}
    /* Deleted LNs write only a -1 marker; live LNs write length-prefixed data. */
    @Override
    public void writeToLog(final ByteBuffer logBuffer,
                           final int logVersion,
                           final boolean forReplication) {
        if (isDeleted()) {
            LogUtils.writePackedInt(logBuffer, -1);
        } else {
            assert data != null;
            LogUtils.writeByteArray(logBuffer,
                                    data, getDataOffset(), getDataSize());
        }
    }

    /* A negative size marker means a deleted LN, so data is left null. */
    @Override
    public void readFromLog(EnvironmentImpl envImpl,
                            ByteBuffer itemBuffer,
                            int entryVersion) {
        int size = LogUtils.readPackedInt(itemBuffer);
        if (size >= 0) {
            data = LogUtils.readBytesNoLength(itemBuffer, size);
        }
    }

    /** LNs have no separate replication wire format. */
    @Override
    public boolean hasReplicationFormat() {
        return false;
    }

    @Override
    public boolean isReplicationFormatWorthwhile(final ByteBuffer logBuffer,
                                                 final int srcVersion,
                                                 final int destVersion) {
        return false;
    }
@Override
public boolean logicalEquals(Loggable other) {
if (!(other instanceof LN)) {
return false;
}
LN otherLN = (LN) other;
/*
* This includes a comparison of deletedness, since the data is null
* for a deleted LN.
*/
return Arrays.equals(getData(), otherLN.getData());
}
    @Override
    public void dumpLog(StringBuilder sb, boolean verbose) {
        sb.append(beginTag());

        if (data != null) {
            sb.append("<data>");
            if (verbose) {
                sb.append(Key.DUMP_TYPE.dumpByteArray(
                    data, getDataOffset(), getDataSize()));
            } else {
                /* Keep record contents out of non-verbose dumps. */
                sb.append("hidden");
            }
            sb.append("</data>");
        }

        dumpLogAdditional(sb, verbose);

        sb.append(endTag());
    }

    /** Appends a printable form of the given key to the builder. */
    public void dumpKey(StringBuilder sb, byte[] key) {
        sb.append(Key.dumpString(key, 0));
    }

    /*
     * Allows subclasses to add additional fields before the end tag.
     */
    protected void dumpLogAdditional(StringBuilder sb,
                                     @SuppressWarnings("unused")
                                     boolean verbose) {
    }
    /**
     * Account for FileSummaryLN's extra marshaled memory. [#17462]
     */
    public void addExtraMarshaledMemorySize(BIN parentBIN) {
        /* Do nothing here. Overridden in FileSummaryLN. */
    }
    /*
     * DatabaseEntry utilities
     */

    /**
     * Copies the non-deleted LN's byte array to the entry. Does not support
     * partial data.
     *
     * @param entry the destination entry; receives a fresh copy of the data.
     */
    public void setEntry(DatabaseEntry entry) {
        assert !isDeleted();
        int len = getDataSize();
        byte[] bytes = new byte[len];
        System.arraycopy(data, getDataOffset(), bytes, 0, len);
        entry.setData(bytes);
    }
    /**
     * Copies the given byte array to the given destination entry, copying only
     * partial data if the entry is specified to be partial. If the byte array
     * is null, clears the entry.
     *
     * @param dest the destination entry; its partial offset/length select the
     * region copied.
     * @param bytes the source array, or null to clear the entry.
     */
    public static void setEntry(DatabaseEntry dest, byte[] bytes) {
        if (bytes != null) {
            boolean partial = dest.getPartial();
            int off = partial ? dest.getPartialOffset() : 0;
            int len = partial ? dest.getPartialLength() : bytes.length;
            if (off + len > bytes.length) {
                /* Clamp the requested region to the bytes available. */
                len = (off > bytes.length) ? 0 : bytes.length - off;
            }
            byte[] newdata;
            if (len == 0) {
                newdata = LogUtils.ZERO_LENGTH_BYTE_ARRAY;
            } else {
                newdata = new byte[len];
                System.arraycopy(bytes, off, newdata, 0, len);
            }
            dest.setData(newdata);
            dest.setOffset(0);
            dest.setSize(len);
        } else {
            dest.setData(null);
            dest.setOffset(0);
            dest.setSize(0);
        }
    }
/**
 * Copies the given source entry to the given destination entry, copying
 * only partial data if the destination entry is specified to be partial.
 *
 * <p>The previous implementation duplicated the body of
 * {@link #setEntry(DatabaseEntry, byte[])} line for line, operating on
 * {@code src.getData()}; delegate to that overload instead so the copy
 * logic lives in one place.</p>
 *
 * <p>NOTE(review): only the source entry's backing array is consulted;
 * the source's own offset/size window is ignored, exactly as in the
 * original code. Confirm callers always pass a full (offset 0) source.</p>
 */
public static void setEntry(DatabaseEntry dest, DatabaseEntry src) {
    setEntry(dest, src.getData());
}
/**
 * Returns a byte array that is a complete copy of the data in a
 * non-partial entry.
 *
 * @param entry a non-partial entry (asserted).
 * @return a newly allocated copy of the entry's bytes, or a shared
 * zero-length array when the entry's size is 0.
 */
public static byte[] copyEntryData(DatabaseEntry entry) {
    assert !entry.getPartial();
    final int len = entry.getSize();
    if (len == 0) {
        /*
         * Skip the arraycopy for a zero-size entry: System.arraycopy
         * throws NullPointerException on a null source array even when
         * the length is 0, and a zero-size entry may carry null data.
         */
        return LogUtils.ZERO_LENGTH_BYTE_ARRAY;
    }
    final byte[] newData = new byte[len];
    System.arraycopy(entry.getData(), entry.getOffset(),
                     newData, 0, len);
    return newData;
}
/**
 * Merges the partial entry with the given byte array, effectively applying
 * a partial entry to an existing record, and returns a new byte array.
 */
public static byte[] resolvePartialEntry(DatabaseEntry entry,
                                         byte[] foundDataBytes ) {
    assert foundDataBytes != null;
    /* Partial window: dlen bytes of the old record are replaced at doff. */
    final int dlen = entry.getPartialLength();
    final int doff = entry.getPartialOffset();
    final int origlen = foundDataBytes.length;
    /* Treat the old record as if it extended to at least doff + dlen. */
    final int oldlen = (doff + dlen > origlen) ? (doff + dlen) : origlen;
    /* Result = old length minus the replaced window plus the new bytes. */
    final int len = oldlen - dlen + entry.getSize();
    final byte[] newData;
    if (len == 0) {
        newData = LogUtils.ZERO_LENGTH_BYTE_ARRAY;
    } else {
        newData = new byte[len];
    }
    int pos = 0;
    /* Keep 0..doff of the old data (truncating if doff > length). */
    int slicelen = (doff < origlen) ? doff : origlen;
    if (slicelen > 0) {
        System.arraycopy(foundDataBytes, 0, newData, pos, slicelen);
    }
    /*
     * NOTE(review): pos advances by doff even when the old data was
     * shorter, leaving a zero-filled gap before the new bytes — this
     * looks intentional (zero padding); confirm against callers.
     */
    pos += doff;
    /* Copy in the new data. */
    slicelen = entry.getSize();
    System.arraycopy(entry.getData(), entry.getOffset(), newData, pos,
                     slicelen);
    pos += slicelen;
    /* Append the rest of the old data (if any). */
    slicelen = origlen - (doff + dlen);
    if (slicelen > 0) {
        System.arraycopy(foundDataBytes, doff + dlen, newData, pos,
                         slicelen);
    }
    return newData;
}
/**
 * Copies {@code size} bytes starting at {@code offset} into a freshly
 * allocated array and installs it as the output entry's data.
 *
 * TODO: Support a reusable-buffer mode — when the entry already owns a
 * large-enough array, copy into it in place and just set offset/size,
 * avoiding this allocation.
 */
public static void outputBytes(final DatabaseEntry outputEntry,
                               final byte[] bytes,
                               final int offset,
                               final int size) {
    final byte[] copy = new byte[size];
    System.arraycopy(bytes, offset, copy, 0, size);
    outputEntry.setData(copy);
}
}